Example #1
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #check inputfields
        log.info("Getting commandline parameters")
        if len(sys.argv) == 2:
            input_afvoer = sys.argv[1] #shape

            log.info("input afvoer: %s" % input_afvoer)
        else:
            log.error("Usage: python rural_afvoerpercentages.py <rr_afvoer>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        log.info("A) Read RR_Afvoer")
        kwkident = config.get('GENERAL', 'kwkident').lower()
        if not turtlebase.arcgis.is_fieldname(gp, input_afvoer, kwkident):
            log.error("field %s not found, we cannot continue" % kwkident)
            sys.exit(1)
        afvoer_data = nens.gp.get_table(gp, input_afvoer, primary_key=kwkident)

        log.info("B) Calculate percentages")
        log.info(" - calculate kw's per peilgebied")
        peilgebied = {}
        afvoer_van = config.get('afvoerpercentages', 'afvoer_van').lower()
        for key, value in afvoer_data.items():
            gpg_van = value[afvoer_van]
            if gpg_van in peilgebied:
                peilgebied[gpg_van].append(key)
            else:
                peilgebied[gpg_van] = [key]

        afvoer_data_output = {}
        log.info(" - calculate percentages")
        percentage = config.get('afvoerpercentages', 'percentage')
        for key, value in peilgebied.items():
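            # each discharge structure (kw) in the level area gets an equal share of 100%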
            perc = 100 / float(len(value))
            for kw in value:
                afvoer_data_output[kw] = {percentage: perc}

        log.info("C) writing to output")
        turtlebase.arcgis.write_result_to_output(input_afvoer, kwkident, afvoer_data_output)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #2
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        needed for this tool:
        """

        if len(sys.argv) == 5:
            input_polygon_fc = sys.argv[1]
            input_channel_fc = sys.argv[2]
            input_landuse_fc = sys.argv[3]
            output_channel = sys.argv[4]
        else:
            log.warning("usage: python geo_genereren_afv_opp.py <input peilgebieden> <input watergangen> <input landgebruik> <output waterlijnen met oppervlak>")
            sys.exit(1)

        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)
        gpg_obj = gp.describe(input_polygon_fc)
        if gpg_obj.ShapeType != 'Polygon':
            geometry_check_list.append("input peilgebieden does not contain polygons, it contains shapetype: %s" % gpg_obj.ShapeType)
        else:
            log.info(" - input peilgebieden is correct")

        ovk_obj = gp.describe(input_channel_fc)
        if ovk_obj.ShapeType != 'Polyline':
            geometry_check_list.append("input channel does not contain polyline, it contains shapetype: %s" % ovk_obj.ShapeType)
        else:
            log.info(" - input channel is correct")

        lu_obj = gp.describe(input_landuse_fc)
        if lu_obj.ShapeType != 'Polygon':
            geometry_check_list.append("input landuse does not contain polygons, it contains shapetype: %s" % lu_obj.ShapeType)
        else:
            log.info(" - input landuse is correct")
        #"<check geometry from input data, append to list if incorrect>"

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []
        ovk_field = config.get('general', 'ovkident')
        gpg_field = config.get('general', 'gpgident')
        landuse_type = config.get('afv_opp', 'landuse_type')

        if not turtlebase.arcgis.is_fieldname(gp, input_polygon_fc, ovk_field):
            log.error("missing field '%s' in %s" % (ovk_field, input_polygon_fc))
            missing_fields.append("%s: %s" % (input_polygon_fc, ovk_field))

        if not turtlebase.arcgis.is_fieldname(gp, input_channel_fc, ovk_field):
            log.error("missing field '%s' in %s" % (ovk_field, input_channel_fc))
            missing_fields.append("%s: %s" % (input_channel_fc, ovk_field))

        if not turtlebase.arcgis.is_fieldname(gp, input_landuse_fc, 'type'):
            log.error("missing field 'type' in %s" % input_landuse_fc)
            missing_fields.append("%s: TYPE" % (input_landuse_fc))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments

        log.info(" - intersect areas with landuse")
        output_intersect_landuse = workspace_gdb + "/landuse_intersect"
        log.info(output_intersect_landuse)
        gp.intersect_analysis(input_polygon_fc + "; " + input_landuse_fc, output_intersect_landuse)

        landuse_type_list = check_for_landuse_types(gp, output_intersect_landuse, "TYPE")
        if len(landuse_type_list) == 0:
            log.error("missing landuse types 'rural' and 'urban'")
            sys.exit(3)

        #log.info(turtlebase.arcgis.is_fieldname(gp, output_intersect_landuse, 'OPP_LA'))
        if not turtlebase.arcgis.is_fieldname(gp, output_intersect_landuse, 'OPP_LA'):
            log.info("create field OPP_LA")
            gp.addfield(output_intersect_landuse, 'OPP_LA', 'Double')
        if not turtlebase.arcgis.is_fieldname(gp, output_intersect_landuse, 'OPP_ST'):
            log.info("create field OPP_ST")
            gp.addfield(output_intersect_landuse, 'OPP_ST', 'Double')

        if 'urban' in landuse_type_list:
            log.info(" - calculate urban area")
            landuse_urban_lyr = turtlebase.arcgis.get_random_layer_name()
            gp.MakeFeatureLayer_management(output_intersect_landuse, landuse_urban_lyr, " TYPE = 'urban' ")
            turtlebase.arcgis.calculate_area(gp, landuse_urban_lyr, "OPP_ST")

        if 'rural' in landuse_type_list:
            log.info(" - calculate rural area")
            landuse_rural_lyr = turtlebase.arcgis.get_random_layer_name()
            gp.MakeFeatureLayer_management(output_intersect_landuse, landuse_rural_lyr, " TYPE = 'rural' ")
            turtlebase.arcgis.calculate_area(gp, landuse_rural_lyr, "OPP_LA")

        output_dissolve_landuse = workspace_gdb + "/dissolve"
        #tempfiles.append(output_dissolve_landuse)
        log.info("check if output fields exist")
        field_rural = "Sum_OPP_LA"
        field_urban = "Sum_OPP_ST"
        if turtlebase.arcgis.is_fieldname(gp, output_intersect_landuse, field_rural):
            log.info(" - %s already exists, delete field" % field_rural)
            gp.deletefield_management(output_intersect_landuse, field_rural)
        if turtlebase.arcgis.is_fieldname(gp, output_intersect_landuse, field_urban):
            gp.deletefield_management(output_intersect_landuse, field_urban)

        log.info(" - dissolve rural and urban areas")
        remove_null_values(gp, output_intersect_landuse, "OPP_LA")
        remove_null_values(gp, output_intersect_landuse, "OPP_ST")

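        # dissolve per channel id (ovk_field), summing the rural (OPP_LA) and
        # urban (OPP_ST) areas into the Sum_OPP_LA / Sum_OPP_ST fields used below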
        gp.Dissolve_management(output_intersect_landuse, output_dissolve_landuse, ovk_field, "OPP_LA sum; OPP_ST sum", "MULTI_PART")

        log.info("Copy landuse area to output")
        dissolve_dict = nens.gp.get_table(gp, output_dissolve_landuse, primary_key=ovk_field.lower())
        output_channel_fc = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        calculate_area_fields(gp, input_channel_fc, output_channel_fc, ovk_field, dissolve_dict, field_rural, field_urban)

        # add from and to coordinates
        log.info("Calculate coordinates")
        update_to_and_from_coordinates(gp, output_channel_fc, ovk_field.lower())
        log.info(" - copy output")
        gp.FeatureclassToFeatureclass_conversion(output_channel_fc, os.path.dirname(output_channel), os.path.basename(output_channel))
        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #3
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            import tempfile
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 5:
            a_watergang = sys.argv[1]
            bc_watergang = sys.argv[2]
            output_fc = sys.argv[3]
            point_intersection = sys.argv[4]
        else:
            log.warning("usage: <a_watergang> <bc_watergang> <output_fc> <point_intersection>")
            sys.exit(1)
            
        tempfiles = []

        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        if not turtlebase.arcgis.is_file_of_type(gp, bc_watergang, 'Polyline'):
            log.error("%s is not a %s feature class!" % (bc_watergang, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (bc_watergang, 'Polyline'))
            
        if not turtlebase.arcgis.is_file_of_type(gp, a_watergang, 'Polyline'):
            log.error("%s is not a %s feature class!" % (a_watergang, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (a_watergang, 'Polyline'))
                
        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")
        ovkident = "OVKIDENT"
        if not turtlebase.arcgis.is_fieldname(gp, a_watergang, ovkident):
            log.error("missing fields in input data: %s" % ovkident)
            sys.exit(2)
        
        bc_watergang_tmp = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Select_analysis(bc_watergang, bc_watergang_tmp)
        if turtlebase.arcgis.is_fieldname(gp, bc_watergang_tmp, ovkident):
            gp.DeleteField_management(bc_watergang_tmp, ovkident)

        #---------------------------------------------------------------------
        # Environments
        G = nx.Graph()
        
        # iterate through your feature class and build a graph
        rows = gp.SearchCursor(bc_watergang_tmp)
        row = rows.next()
        inDesc = gp.describe(bc_watergang_tmp)
        while row:
            # we need a unique representation for each edge's start and end points
            feat = row.GetValue(inDesc.ShapeFieldName)
            objectid = row.GetValue(inDesc.OIDFieldName)
            
            part = feat.getpart(0)
            pnt = part.Next()
            count = 0
            while pnt:
                if count == 0:
                    start_xy = (pnt.X, pnt.Y)
                else:
                    end_xy = (pnt.X, pnt.Y)
                
                pnt = part.Next()
                count += 1
            G.add_edge(start_xy,end_xy,oid=objectid)
            row = rows.next()
        
        # get the connected components
        Components = nx.connected_components(G)
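        # each connected cluster of bc-watercourses gets one identifier ("bc_<n>"):
        # its vertices are written to a point shapefile and joined back onto the
        # lines, so all lines in a cluster share the same OVKIDENT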
            
        point = gp.CreateObject("POINT")
        point_fc = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        tempfiles.append(point_fc)
        fc_name = os.path.basename(point_fc)
        rd_new = os.path.join(os.path.dirname(sys.argv[0]), "rdnew.prj")
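        # "rdnew.prj" (Dutch RD New projection file) is expected in the script directory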
        gp.AddMessage(rd_new)
        gp.CreateFeatureclass_management(os.path.dirname(point_fc), fc_name, "POINT","#","DISABLED","DISABLED", rd_new,"#", "0","0","0")
        gp.AddField_management(point_fc, ovkident, "TEXT")
        
        rows_ic = gp.InsertCursor(point_fc)
        for ident, xy in enumerate(Components):
            for pnt in xy:
                point.X = pnt[0]
                point.Y = pnt[1]
        
                newfeature = rows_ic.NewRow()
                newfeature.shape = point
                newfeature.SetValue(ovkident, "bc_%s" % ident)
                rows_ic.InsertRow(newfeature)
        
        temp_fc = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        tempfiles.append(temp_fc)
        gp.SpatialJoin_analysis(bc_watergang_tmp, point_fc, temp_fc,"JOIN_ONE_TO_ONE")
        
        output_fc_line = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        tempfiles.append(output_fc_line)
        gp.Dissolve_management(temp_fc,output_fc_line,ovkident,"#","MULTI_PART","DISSOLVE_LINES")
        
        gp.merge_management("%s;%s"% (a_watergang, output_fc_line), output_fc)
        gp.Intersect_analysis("%s #;%s #" % (a_watergang, bc_watergang_tmp), point_intersection,"ALL","#","POINT")
                
        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            for temp_file in tempfiles:
                if gp.exists(temp_file):
                    gp.delete(temp_file)
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #4
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        # --------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = (
            turtlebase.arcgis.create_temp_geodatabase(gp, workspace))
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        # --------------------------------------------------------------------
        # check inputfields
        log.info("Getting commandline parameters")

        if len(sys.argv) == 6:
            input_peilgebieden = sys.argv[1]
            input_waterlevel_table = sys.argv[2]
            input_ahn_raster = sys.argv[3]
            output_inundation = sys.argv[4]
            output_folder_waterlevel = sys.argv[5]
        else:
            log.error("Usage: python rural_inundatie.py <peilgebieden feature> "
                      "<input_waterlevel_table> <input_ahn_raster> "
                      "<output grid> <output folder waterlevel>")
            sys.exit(1)

        # --------------------------------------------------------------------
        #check input parameters
        log.info('Checking presence of input files')
        if not(gp.exists(input_peilgebieden)):
            log.error("inputfile peilgebieden: %s does not exist!",
                      input_peilgebieden)
            sys.exit(5)
        if not(gp.exists(input_waterlevel_table)):
            log.error("inputfile resultaten: %s does not exist!",
                      input_waterlevel_table)
            sys.exit(5)
        if not(gp.exists(input_ahn_raster)):
            log.error("inputfile hoogtegrid: %s does not exist!",
                      input_ahn_raster)
            sys.exit(5)

        log.info('input parameters checked')
        # --------------------------------------------------------------------
        # Check geometry input parameters
        cellsize = gp.describe(input_ahn_raster).MeanCellHeight

        log.info("Check geometry of input parameters")
        geometry_check_list = []

        log.debug(" - check level areas: %s" % input_peilgebieden)
        if gp.describe(input_peilgebieden).ShapeType != 'Polygon':
            log.error("Input level area is not a polygon feature class!")
            geometry_check_list.append(input_peilgebieden + " -> (Polygon)")

        log.debug(" - check ahn raster %s" % input_ahn_raster)
        if gp.describe(input_ahn_raster).DataType != 'RasterDataset':
            log.error("Input AHN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_ahn_raster).PixelType[0] not in ['U', 'S']:
            log.error("Input AHN is a floating point raster, "
                      "for this script an integer raster is necessary")
            geometry_check_list.append(input_ahn_raster + " -> (Integer)")

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        log.info('input format checked')
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")
        gpgident = config.get('General', 'gpgident')

        missing_fields = []

        # create return period list
        return_periods = config.get(
            'Inundatie', 'herhalingstijden').split(", ")
        log.debug(" - return periods: %s" % return_periods)

        # check required fields in the input data,
        # append them to the list if missing
        if not turtlebase.arcgis.is_fieldname(
                    gp, input_peilgebieden, gpgident):
            log.debug(" - missing: %s in %s" % (gpgident, input_peilgebieden))
            missing_fields.append("%s: %s" % (input_peilgebieden, gpgident))

        if not turtlebase.arcgis.is_fieldname(
                gp, input_waterlevel_table, gpgident):
            log.debug(" - missing: %s in %s" % (gpgident, input_waterlevel_table))
            missing_fields.append("%s: %s" % (input_waterlevel_table, gpgident))

        for return_period in return_periods:
            if not turtlebase.arcgis.is_fieldname(
                    gp, input_waterlevel_table, "WS_%s" % return_period):
                log.debug(" - missing: WS_%s in %s" % (
                                return_period, input_waterlevel_table))
                missing_fields.append("%s: WS_%s" % (
                        input_waterlevel_table, return_period))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #---------------------------------------------------------------------
        # Environments
        log.info("Setting environments")
        temp_peilgebieden = (
                turtlebase.arcgis.get_random_file_name(workspace_gdb))
        log.debug(" - export level areas")
        gp.select_analysis(input_peilgebieden, temp_peilgebieden)

        # use extent from level areas
        gp.extent = gp.describe(temp_peilgebieden).extent

        # add waterlevel to peilgebieden
        log.info("Read waterlevels from table")
        waterlevel_dict = nens.gp.get_table(
            gp, input_waterlevel_table, primary_key=gpgident.lower())
        join_waterlevel_to_level_area(
                    gp, temp_peilgebieden, gpgident,
                    return_periods, waterlevel_dict)

        #---------------------------------------------------------------------
        log.info("A) Create rasters for waterlevels")
        # Create waterlevel rasters
        if output_folder_waterlevel == "#":
            output_folder_waterlevel = workspace_gdb

        for return_period in return_periods:
            log.info(" - create raster for ws_%s" % return_period)
            out_raster_dataset = os.path.join(
                                    output_folder_waterlevel,
                                     "ws_%s" % return_period)
            if not gp.exists(out_raster_dataset):
                input_field = "WS_%s" % return_period
                gp.FeatureToRaster_conversion(temp_peilgebieden,
                                              input_field,
                                              out_raster_dataset,
                                              int(cellsize))
            else:
                log.error("output waterlevel raster already exists, "
                          "delete this first or change output folder")
                sys.exit(1)

        #---------------------------------------------------------------------
        log.info("B) Create Inundation raster")
        inundation_raster_list = []
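        # an inundation raster is created per landuse category (urban,
        # high-value agriculture, arable land, grassland) for the return
        # period configured in the ini-file; the rasters are merged at the end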

        # create ahn ascii
        ahn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn_raster, ahn_ascii)

        # inundatie stedelijk
        return_period_urban = config.get(
            'Inundatie', 'herhalingstijd_inundatie_stedelijk')
        if config.get('Inundatie', 'percentage_inundatie_stedelijk') != "-":
            log.debug(" - create inundation urban")
            waterlevel = "%s/ws_%s" % (
                output_folder_waterlevel, return_period_urban)
            if gp.exists(waterlevel):
                inundation_urban = turtlebase.arcgis.get_random_file_name(
                                                          workspace, ".asc")
                turtlebase.spatial.create_inundation_raster(
                                    ahn_ascii, ahn_ascii, waterlevel, 1,
                                    return_period_urban, inundation_urban,
                                    workspace, use_lgn=False)
                inundation_raster_list.append(inundation_urban)
            else:
                log.error("%s does not exist! check ini-file and tempfolder",
                        waterlevel)

        # inundatie hoogwaardige landbouw
        return_period_agriculture = config.get(
            'Inundatie', 'herhalingstijd_inundatie_hoogwaardig')
        if config.get('Inundatie', 'percentage_inundatie_hoogwaardig') != "-":
            log.debug(" - create inundation agriculture")
            waterlevel = "%s/ws_%s" % (
                        output_folder_waterlevel, return_period_agriculture)
            if gp.exists(waterlevel):
                # Inundation with lgn
                inundation_agriculture = (
                    turtlebase.arcgis.get_random_file_name(
                                            workspace, ".asc"))
                turtlebase.spatial.create_inundation_raster(
                                    ahn_ascii, ahn_ascii, waterlevel,
                                    2, return_period_agriculture,
                                    inundation_agriculture, workspace,
                                    use_lgn=False)
                inundation_raster_list.append(inundation_agriculture)
            else:
                log.error("%s does not exist! check ini-file and tempfolder",
                          waterlevel)

        # inundatie akkerbouw
        return_period_rural = config.get(
            'Inundatie', 'herhalingstijd_inundatie_akker')
        if config.get('Inundatie', 'percentage_inundatie_akker') != "-":
            log.debug(" - create inundation rural")
            waterlevel = "%s/ws_%s" % (
                output_folder_waterlevel, return_period_rural)
            if gp.exists(waterlevel):
                inundation_rural = turtlebase.arcgis.get_random_file_name(
                                                        workspace, ".asc")
                turtlebase.spatial.create_inundation_raster(
                                ahn_ascii, ahn_ascii, waterlevel,
                                3, return_period_rural, inundation_rural,
                                workspace, use_lgn=False)
                inundation_raster_list.append(inundation_rural)
            else:
                log.error("%s does not exist! check ini-file and tempfolder",
                          waterlevel)

        # inundatie grasland
        return_period_grass = config.get(
            'Inundatie', 'herhalingstijd_inundatie_grasland')
        if config.get('Inundatie', 'percentage_inundatie_grasland') != "-":
            log.debug(" - create inundation grass")
            waterlevel = ("%s/ws_%s" % (
                            output_folder_waterlevel,
                            return_period_grass))
            if gp.exists(waterlevel):
                inundation_grass = turtlebase.arcgis.get_random_file_name(
                                                        workspace, ".asc")
                turtlebase.spatial.create_inundation_raster(
                                ahn_ascii, ahn_ascii, waterlevel,
                                4, return_period_grass, inundation_grass,
                                workspace, use_lgn=False)
                inundation_raster_list.append(inundation_grass)
            else:
                log.error("%s does not exist! check ini-file and tempfolder",
                          waterlevel)

        if len(inundation_raster_list) > 0:
            log.info(" - merge inundation rasters")
            turtlebase.spatial.merge_ascii(
                inundation_raster_list, output_inundation, workspace)
        else:
            log.error("there are no inundation rasters available")

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        tempfiles = os.listdir(workspace)
        for temp_file in tempfiles:
            if temp_file.endswith('.asc'):
                try:
                    os.remove(os.path.join(workspace, temp_file))
                except Exception, e:
                    log.debug(e)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #5
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        # Ernst calculation class
        ernst_drainage = ernst()

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        if len(sys.argv) == 7:
            file_input_peilgebieden_feature = sys.argv[1] #shape
            file_input_peilvakgegevens = sys.argv[2] #[ZOMERPEIL],[WINTERPEIL]
            file_input_kwelstroom = sys.argv[3] #[KWELSTROOM]
            file_input_maaiveldkarakteristiek = sys.argv[4] #[MV_HGT_50]
            file_input_bodemsoort = sys.argv[5] #shape
            file_output = sys.argv[6]
        else:
            log.error("Usage: python rural_drainageparameter.py <peilgebieden shape> <peilvakgegevens> <kwelstroom> <maaiveldkarakteristiek> <bodemsoort shape> <outputtabel HydroBase>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check geometry
        log.info("Check geometry of input parameters")
        if not turtlebase.arcgis.is_file_of_type(gp, file_input_peilgebieden_feature, 'Polygon'):
            log.error("Input %s does not contain polygons" % file_input_peilgebieden_feature)
            sys.exit(1)
        if not turtlebase.arcgis.is_file_of_type(gp, file_input_bodemsoort, 'Polygon'):
            log.error("Input %s does not contain polygons" % file_input_bodemsoort)
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check required fields
        log.info("Check required fields in input data")
        peilgebied_id = config.get('GENERAL', 'gpgident')
        pawn_code = config.get('Ernst', 'input_bodemsoort_code')

        missing_fields = []
        # each input file maps to the list of fields it must contain
        check_fields = {file_input_peilgebieden_feature: [peilgebied_id],
                        file_input_peilvakgegevens: [peilgebied_id,
                                config.get('Ernst', 'peilvakgegevens_zomerpeil'),
                                config.get('Ernst', 'peilvakgegevens_winterpeil')],
                        file_input_kwelstroom: [peilgebied_id,
                                config.get('Ernst', 'kwelstroom_kwelstroom')],
                        file_input_maaiveldkarakteristiek: [peilgebied_id,
                                config.get('Ernst', 'maaiveldkarakteristiek_value')],
                        file_input_bodemsoort: [pawn_code]}

        for input_file, fields in check_fields.items():
            for field in fields:
                if not turtlebase.arcgis.is_fieldname(gp, input_file, field):
                    log.error("Missing field %s in %s" % (field, input_file))
                    missing_fields.append("missing %s in %s" % (field, input_file))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check record count
        log.info("Check records of input parameters")
        count_area = turtlebase.arcgis.fc_records(gp, file_input_peilgebieden_feature)
        count_surface_level_table = turtlebase.arcgis.fc_records(gp, file_input_peilvakgegevens)
        count_seepage = turtlebase.arcgis.fc_records(gp, file_input_kwelstroom)
        count_scurve = turtlebase.arcgis.fc_records(gp, file_input_maaiveldkarakteristiek)

        if count_surface_level_table != count_area:
            log.error("input %s (%s records) does not contain the same number of records as %s (%s records)"
                      % (file_input_peilvakgegevens, count_surface_level_table,
                         file_input_peilgebieden_feature, count_area))
            sys.exit(2)
        if count_seepage != count_area:
            log.error("input %s (%s records) does not contain the same number of records as %s (%s records)"
                      % (file_input_kwelstroom, count_seepage,
                         file_input_peilgebieden_feature, count_area))
            sys.exit(2)
        if count_scurve != count_area:
            log.error("input %s (%s records) does not contain the same number of records as %s (%s records)"
                      % (file_input_maaiveldkarakteristiek, count_scurve,
                         file_input_peilgebieden_feature, count_area))
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        #A: bodemsoort
        log.info("A-1) Copy peilgebieden to temporary workspace")
        temp_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(file_input_peilgebieden_feature, temp_peilgebieden)

        log.info("A-2) Copy bodemsoort to temporary workspace")
        temp_bodemsoort = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(file_input_bodemsoort, temp_bodemsoort)

        log.info("A-3) Intersect bodemsoort + peilgebieden -> peilg+bodem")
        temp_intersect_bodem_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Intersect_analysis(temp_peilgebieden + "; " + temp_bodemsoort, temp_intersect_bodem_peilgebieden)

        log.info("A-4) Dissolve peilg+bodem")
        temp_dissolve_bodem_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Dissolve_management(temp_intersect_bodem_peilgebieden, temp_dissolve_bodem_peilgebieden, peilgebied_id + "; " + pawn_code, "")

        log.info("A-5) Read peilg+bodem(dissolve)")
        log.info(" - reading shape")
        peilv_grondsoort = {}
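        # peilv_grondsoort maps every level-area id to the soil types
        # (pawn_code) that intersect it, with the intersected area per soil type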
        row = gp.SearchCursor(temp_dissolve_bodem_peilgebieden)
        for item in nens.gp.gp_iterator(row):
            area_id = item.GetValue(peilgebied_id)
            soil_id = item.GetValue(pawn_code)
            area = item.Shape.Area
            data_row = {'pawn_code': soil_id, 'area': area}
            if not(peilv_grondsoort.has_key(area_id)):
                peilv_grondsoort[area_id] = {'grondsoort':[]}
            peilv_grondsoort[area_id]['grondsoort'].append(data_row)

        log.info(" - sorting")
        for key in peilv_grondsoort.keys():
            peilv_grondsoort[key]['grondsoort'].sort(sort_area_rev)
            peilv_grondsoort[key]['area'] = sum_grondsoort(peilv_grondsoort[key]['grondsoort'])

        # ---------------------------------------------------------------------------
        #B: ernst parameters

        # read shape files: [ZOMERPEIL, WINTERPEIL, KWELSTROOM, MV_HGT_50]
        log.info("B-1) Reading inputfile peilvakgegevens")
        data_set = {}

        row = gp.SearchCursor(file_input_peilvakgegevens)
        for item in nens.gp.gp_iterator(row):
            field_id = item.GetValue(peilgebied_id)
            data_set[field_id] = {}
            data_set[field_id]['zomerpeil'] = item.GetValue(config.get('Ernst', 'peilvakgegevens_zomerpeil'))
            data_set[field_id]['winterpeil'] = item.GetValue(config.get('Ernst', 'peilvakgegevens_winterpeil'))

            if (data_set[field_id]['zomerpeil'] < float(config.get('Ernst', 'validate_min_zomerpeil'))) or (data_set[field_id]['zomerpeil'] > float(config.get('Ernst', 'validate_max_zomerpeil'))):
                log.error("zomerpeil has a non-valid value of " + str(data_set[field_id]['zomerpeil']))
                sys.exit(5)
            if (data_set[field_id]['winterpeil'] < float(config.get('Ernst', 'validate_min_winterpeil'))) or (data_set[field_id]['winterpeil'] > float(config.get('Ernst', 'validate_max_winterpeil'))):
                log.error("winterpeil has a non-valid value of " + str(data_set[field_id]['winterpeil']))
                sys.exit(5)

        # read shape files: [ZOMERPEIL, WINTERPEIL, KWELSTROOM, MV_HGT_50]
        log.info("B-2) Reading inputfile kwelstroom")
        row = gp.SearchCursor(file_input_kwelstroom)
        for item in nens.gp.gp_iterator(row):
            field_id = item.GetValue(peilgebied_id)
            if not(data_set.has_key(field_id)):
                log.error("non-matching kwelstroom and peilvakgegevens, check if peilvakgegevens has key '" + field_id + "'")
                sys.exit(9)
            data_set[field_id]['kwel'] = item.GetValue(config.get('Ernst', 'kwelstroom_kwelstroom'))

        # read shape files: [ZOMERPEIL, WINTERPEIL, KWELSTROOM, MV_HGT_50]
        log.info("B-3) Reading inputfile maaiveldkarakteristiek")
        row = gp.SearchCursor(file_input_maaiveldkarakteristiek)
        for item in nens.gp.gp_iterator(row):
            field_id = item.GetValue(peilgebied_id)
            if not(data_set.has_key(field_id)):
                log.error("non-matching maaiveldkarakteristiek and peilvakgegevens, check if peilvakgegevens has key '" + field_id + "'")
                sys.exit(9)
            data_set[field_id]['maaiveld'] = item.GetValue(config.get('Ernst', 'maaiveldkarakteristiek_value'))

        # ---------------------------------------------------------------------------
        #check input: each record should contain all fields (count: 4)
        log.info("B-4) Checking input")
        for key, value in data_set.items():
            if len(value.items()) != 4:
                log.error("%s: %s" % (key, value))
                log.error("check if input files match each other!")
                sys.exit(6)

        # ---------------------------------------------------------------------------
        # determine drooglegging (freeboard): [DL] = [MV_HGT_50] - max([WINTERPEIL], [ZOMERPEIL])
        # determine drainage resistance [ALFA_LZ] = xx * [DL] - yy, where xx and yy depend on the class
        # determine INF_OPWAT, OPP_AFVOER
        log.info("B-6) preparing data for output")
        data_set_output = {}
        import time
        date_str = time.strftime("%d %B %Y %H:%M:%S")
        log.info("Calculating GRONDSOORT, drooglegging, ALFA_LZ, INF_OPWAT, OPP_AFVOER... ")
        log.info(" - date string: " + date_str)
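        # per level area: drooglegging (freeboard) from surface level and the
        # summer/winter levels, the Ernst alfa from seepage and freeboard,
        # fixed defaults for INF_OPWAT and OPP_AFVOER, and the first soil type
        # after sorting as GRONDSOORT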
        for key, item in data_set.items():
            #print key, item
            data_set[key]['drooglegging'] = ernst_drainage.calc_dl(item['maaiveld'], item['zomerpeil'], item['winterpeil'])
            data_set_output[key] = {}
            data_set_output[key][peilgebied_id] = key #important!
            data_set_output[key][config.get('Ernst', 'output_alfa_lz')] = ernst_drainage.calc_alfa(data_set[key]['kwel'], data_set[key]['drooglegging'])
            data_set_output[key][config.get('Ernst', 'output_inf_opwat')] = 500  # or data_set[key]['ALFA_LZ'] * 1.5
            data_set_output[key][config.get('Ernst', 'output_opp_afvoer')] = 0.5
            grondsrt_str = ""
            try:
                data_set_output[key][config.get('Ernst', 'output_grondsoort')] = peilv_grondsoort[key]['grondsoort'][0]['pawn_code']
                for idx in range(min(len(peilv_grondsoort[key]['grondsoort']), 5)):
                    grondsrt_str = grondsrt_str + str(peilv_grondsoort[key]['grondsoort'][idx]['pawn_code']) + "(" + str(int(100 * peilv_grondsoort[key]['grondsoort'][idx]['area'] / peilv_grondsoort[key]['area'])) + "%) "
            except Exception, e:
                log.warning(e)
                log.warning("id " + key + " has no " + config.get('Ernst', 'output_grondsoort') + " value!")
                data_set_output[key][config.get('Ernst', 'output_grondsoort')] = -1
            source_str = "grondsrt:" + grondsrt_str + "pv:" + os.path.basename(file_input_peilvakgegevens) + " kwel:" + os.path.basename(file_input_kwelstroom) + " mv:" + os.path.basename(file_input_maaiveldkarakteristiek)
            if len(source_str) > 50:
                source_str = source_str[:50]
            data_set_output[key]['SOURCE'] = source_str
            data_set_output[key]['DATE_TIME'] = date_str

        # ---------------------------------------------------------------------------
        #C: output
        #add cols [ALFA_LZ], [INF_OPWAT], [OPP_AFVOER]
        drainageFields = {peilgebied_id: {'type': 'TEXT', 'length': '30'},
                          config.get('Ernst', 'output_alfa_lz'):{'type': 'DOUBLE'},
                          config.get('Ernst', 'output_inf_opwat'):{'type': 'DOUBLE'},
                          config.get('Ernst', 'output_opp_afvoer'):{'type': 'DOUBLE'},
                          config.get('Ernst', 'output_grondsoort'):{'type': 'INTEGER'},
                          'SOURCE':{'type': 'TEXT', 'length': '256'},
                          'DATE_TIME':{'type': 'TEXT', 'length': '40'},
                          'COMMENTS':{'type': 'TEXT', 'length': '256'}}

        # check if output_table exists; if not, create it
        log.info("C-1) Checking output table... ")
        if not(gp.exists(file_output)):
            gp.CreateTable(os.path.dirname(file_output), os.path.basename(file_output))

        # check if output_table has the correct fields
        log.info("C-2) Checking fields... ")
        for field_name, field_settings in drainageFields.items():
            if field_settings.has_key('length'):
                if not turtlebase.arcgis.is_fieldname(gp, file_output, field_name):
                    gp.AddField(file_output, field_name, field_settings['type'], '#', '#', field_settings['length'])
            else:
                if not turtlebase.arcgis.is_fieldname(gp, file_output, field_name):
                    gp.AddField(file_output, field_name, field_settings['type'])

        # ---------------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(file_output, peilgebied_id, data_set_output)
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #6
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        needed for this tool:
        """
        if len(sys.argv) == 6:
            log.info("Reading input parameters")
            mpoint = sys.argv[1]
            hydroline = sys.argv[2]
            output_xyz = sys.argv[3]
            output_yz = sys.argv[4]
            output_locations = sys.argv[5]
        else:
            log.warning("usage: <mpoint> <hydroline> <output_xyz> <output_yz> <output_locations>")
            sys.exit(1)

        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)

        #"<check geometry from input data, append to list if incorrect>"

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        ovkident = 'ovkident'
        proident = 'proident'
        zcoord = 'ZH'
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data,
        #        append them to list if missing>
        check_fields = {hydroline: [ovkident],
                        mpoint: [proident, zcoord]}
        #check_fields = {input_1: [fieldname1, fieldname2],
        #                 input_2: [fieldname1, fieldname2]}
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
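        # workflow: dissolve the point cloud to one multipoint per profile
        # (proident), take its centroid, join the nearest hydroline within
        # 100 m, then sort the cloud into xyz and yz profiles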
        multipoints = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        log.info("Dissolving pointcloud to multipoint")
        gp.Dissolve_management(mpoint, multipoints, proident)

        if output_locations == '#':
            output_locations = (
                turtlebase.arcgis.get_random_file_name(workspace_gdb))
        log.info("Calculating coordinates of centerpoints")
        create_centroids(gp, multipoints, output_locations, proident)

        centerpoints_sj = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        log.info("Calculating adjacent hydrolines")
        gp.SpatialJoin_analysis(output_locations, hydroline, centerpoints_sj,
                                'JOIN_ONE_TO_ONE', "#", "#", "CLOSEST", 100)

        log.info("Reading center points")
        centerpoints_d = nens.gp.get_table(gp, centerpoints_sj,
                                           primary_key=proident)

        log.info("Reading hydrolines")
        lineparts = get_line_parts(gp, hydroline, ovkident)
        log.info("Reading pointcloud")
        pointcloud = get_pointcloud(gp, mpoint, proident, zcoord)

        log.info("Sorting profiles")
        profiles_xyz, profiles_yz = sort_pointcloud(gp, centerpoints_d,
                                                    lineparts, pointcloud)
        log.info("Write xyz points to output")
        write_profiles_xyz(gp, profiles_xyz, output_xyz)
        log.info("Write yz information to output table")
        write_profiles_yz(gp, profiles_yz, output_yz)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(multipoints)
            gp.delete(centerpoints_sj)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #7
def main():
    try:
        """
        This module checks what percentage of the culverts lies below the
        target water level and what percentage lies below the channel bed level.
        """
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)
        
        # is already loaded globally, but does not work (?)
        
        
        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)
        #---------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 9:
            peilgebieden_fc = sys.argv[1]
            input_duikers = sys.argv[2]
            input_stuwen = sys.argv[3]
            input_sifons = sys.argv[4]
            input_waterlopen_legger = sys.argv[5]
            output_duikers = sys.argv[6]
            output_stuwen = sys.argv[7]
            output_sifons = sys.argv[8]
        else:
            log.error("usage: <peilgebieden> <duikers> <stuwen> <sifons> <waterlopen_legger> <duikers_out> <stuwen_out> <sifons_out>")
            sys.exit(1)

        if not gp.exists(peilgebieden_fc):
            log.error("Feature class '%s' is not available in the hydrobase" % peilgebieden_fc)
            sys.exit(1)

        # TODO: add a check of the base data

        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data,
        #        append them to list if missing>
        #check_fields = {}
        gpgident = config.get("general", "gpgident").lower()
        ovkident = config.get("general", "ovkident").lower()
        
        #legger
        bodemhoogte_benedenstrooms = config.get("controle_kunstwerken", "bodemhoogte_benedenstrooms").lower()
        bodemhoogte_bovenstrooms = config.get("controle_kunstwerken", "bodemhoogte_bovenstrooms").lower()
#        bodem_hoogte_berekend = config.get("controle_kunstwerken", "bodem_hoogte_berekend").lower()
        lengte_waterloop = config.get("controle_kunstwerken", "lengte_waterloop").lower()
        output_field_verhang_bodem = config.get("controle_kunstwerken", "output_field_verhang_bodem").lower()
        #peilgebieden
        winterpeil = config.get("controle_kunstwerken", "winterpeil").lower()
        zomerpeil = config.get("controle_kunstwerken", "zomerpeil").lower()
        
        #verhang = config.get("controle_kunstwerken", "verhang").lower()
        
        #duikers
        kduident = config.get("controle_kunstwerken", "kduident").lower()
        duiker_middellijn_diam = config.get("controle_kunstwerken", "duiker_middellijn_diam").lower()
        duiker_vorm = config.get("controle_kunstwerken", "duiker_vorm").lower()
        duiker_lengte = config.get("controle_kunstwerken", "duiker_lengte").lower()
        duikerhoogte_bovenstrooms = config.get("controle_kunstwerken", "duikerhoogte_bovenstrooms").lower()
        duikerhoogte_benedenstrooms = config.get("controle_kunstwerken", "duikerhoogte_benedenstrooms").lower()
        duikerhoogte = config.get("controle_kunstwerken", "duikerhoogte").lower()
        
        # Read output field names
        output_field_duikerlengte = config.get("controle_kunstwerken", "output_field_duikerlengte").lower()
        output_field_duikerverhang = config.get("controle_kunstwerken", "output_field_duikerverhang").lower()
        output_field_percentage_bodem = config.get("controle_kunstwerken", "output_field_percentage_bodem").lower()
        output_field_percentage_bovenwinterpeil = config.get("controle_kunstwerken", "output_field_percentage_bovenwinterpeil").lower() 
        output_field_percentage_bovenzomerpeil = config.get("controle_kunstwerken", "output_field_percentage_bovenzomerpeil").lower()
        
        # default values
        nodatavalue = int(config.get("controle_kunstwerken", "nodatavalue").lower())
        treshold_value_verhang_duikers = float(config.get("controle_kunstwerken", "treshold_value_verhang_duikers").lower())
        treshold_value_verhang_sifons = config.get("controle_kunstwerken", "treshold_value_verhang_sifons").lower()        

        #stuwen
        kstident = config.get("controle_kunstwerken", "kstident").lower()
        stuw_hoogte = config.get("controle_kunstwerken", "stuw_hoogte").lower()
        
        #sifons
        ksyident = config.get("controle_kunstwerken", "ksyident").lower()
        sifonhoogte_benedenstrooms = config.get("controle_kunstwerken", "sifonhoogte_benedenstrooms").lower()
        sifonhoogte_bovenstrooms = config.get("controle_kunstwerken", "sifonhoogte_bovenstrooms").lower()
        sifon_middellijn_diam = config.get("controle_kunstwerken", "sifon_middellijn_diam").lower()
        sifon_vorm = config.get("controle_kunstwerken", "sifon_vorm").lower()
        sifon_lengte = config.get("controle_kunstwerken", "sifon_lengte").lower()
        sifonhoogte = config.get("controle_kunstwerken", "sifonhoogte").lower()
        
        sifon_middellijn_diam2 = config.get("controle_kunstwerken", "sifon_middellijn_diam2").lower()
        output_field_sifonverhang = config.get("controle_kunstwerken", "output_field_sifonverhang").lower()
        output_field_sifon_percentage_bodem = config.get("controle_kunstwerken", "output_field_sifon_percentage_bodem").lower()
        output_field_sifon_percentage_bovenwinterpeil = config.get("controle_kunstwerken", "output_field_sifon_percentage_bovenwinterpeil").lower()
        output_field_sifon_percentage_bovenzomerpeil = config.get("controle_kunstwerken", "output_field_sifon_percentage_bovenzomerpeil").lower()

        # store fieldnames in a list, for convenience in further use
        list_fieldnames_watergangen = [ovkident, bodemhoogte_benedenstrooms,bodemhoogte_bovenstrooms, lengte_waterloop]
        list_fieldnames_peilgebieden = [gpgident, winterpeil, zomerpeil]
        list_fieldnames_duikers = [kduident, duiker_middellijn_diam,duikerhoogte_bovenstrooms,duikerhoogte_benedenstrooms, duikerhoogte, duiker_vorm, duiker_lengte]
        list_fieldnames_stuwen = [kstident,stuw_hoogte]
        list_fieldnames_sifons = [ksyident,sifonhoogte_benedenstrooms,sifonhoogte_bovenstrooms,sifon_middellijn_diam,sifon_middellijn_diam2, sifon_vorm, sifon_lengte, sifonhoogte] 
        
        check_fields = {peilgebieden_fc: list_fieldnames_peilgebieden,
                         input_waterlopen_legger: list_fieldnames_watergangen,
                         input_duikers: list_fieldnames_duikers,
                         input_stuwen: list_fieldnames_stuwen,
                         input_sifons: list_fieldnames_sifons
                         }
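        # check_fields maps each supplied input feature class to the fields it must
        # contain; inputs passed as '#' (not supplied) are skipped in the loop below.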
        
        for input_fc, fieldnames in check_fields.items():
            if input_fc !='#':
                for fieldname in fieldnames:
                    if not turtlebase.arcgis.is_fieldname(
                            gp, input_fc, fieldname):
                        errormsg = "fieldname %s not available in %s" % (
                                        fieldname, input_fc)
                        log.error(errormsg)
                        missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check record counts
        log.info("Check number of records in input data")
        errorcode = 0
        nr_gpg = turtlebase.arcgis.fc_records(gp, peilgebieden_fc)
        if nr_gpg == 0:
            log.error("%s fc is empty" % peilgebieden_fc)
            errorcode += 1
        
        if input_duikers != '#':
            #-------------------------------------------------------------------------------------------------------------------------------
            # SECTION DUIKERS
            # 
            #
            #---------------------------------------------------------------------
            # check whether all required datasets have been supplied:
            benodigdebestanden = [input_waterlopen_legger, peilgebieden_fc, output_duikers]
            for fc in benodigdebestanden:
                if fc == '#':
                    log.error('Bestand %s is noodzakelijk om duikers te controleren' % fc)
            # Initialise dictionary
            duikers = {}
            #---------------------------------------------------------------------
            # Join culverts with channels
            log.info('Koppel kunstwerken met watergangen')
            duikers_incl_watergangen = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Spatialjoin_analysis(input_duikers, input_waterlopen_legger, duikers_incl_watergangen)
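            # The spatial join copies the channel attributes onto each culvert;
            # add_fc_values_to_dict then collects those fields into the duikers
            # dict, keyed on the culvert ident (kduident).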
            # Write data to dictionary
            log.info('Schrijf informatie weg naar kunstwerken')
            duikers = add_fc_values_to_dict(gp, duikers_incl_watergangen, duikers, kduident, list_fieldnames_watergangen)
            
            #---------------------------------------------------------------------
            # Join culverts with level areas
            log.info('Koppel kunstwerken met peilgebieden')
            duikers_incl_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Spatialjoin_analysis(input_duikers, peilgebieden_fc, duikers_incl_peilgebieden)
            duikers = add_fc_values_to_dict(gp, duikers_incl_peilgebieden, duikers, kduident, list_fieldnames_peilgebieden)
            # Read data from the culverts
            output_field_duikerlengte = config.get("controle_kunstwerken", "output_field_duikerlengte").lower()
            #duikers = add_fc_attribs_to_dict(gp, duikers_incl_peilgebieden, duikers, kduident, 'Length', output_field_duikerlengte)
            duikers = add_fc_values_to_dict(gp, duikers_incl_peilgebieden, duikers, kduident, list_fieldnames_duikers)
            
            
            #---------------------------------------------------------------------
            # Calculations

            # Calculate the percentages below and above surface level
            log.info('Start calculation')
            duikers = calculate_duikers(duikers, config, nodatavalue, treshold_value_verhang_duikers, duiker_vorm,\
                          bodemhoogte_benedenstrooms, bodemhoogte_bovenstrooms,duiker_middellijn_diam, duikerhoogte,\
                          duikerhoogte_bovenstrooms,duikerhoogte_benedenstrooms, zomerpeil, winterpeil,lengte_waterloop,\
                          duiker_lengte, output_field_duikerverhang, output_field_percentage_bodem,\
                          output_field_percentage_bovenwinterpeil, output_field_percentage_bovenzomerpeil,output_field_verhang_bodem)
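            # calculate_duikers (helper defined elsewhere in this module) is assumed
            # to derive, per culvert, the gradient over its length and the percentages
            # relative to the channel bottom and the summer/winter target levels,
            # falling back to nodatavalue where a value cannot be computed.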
                          
            #log.info(duikers)
            #log.info('dict_fields %s' %dict_fields)
            log.info('Creeer output file')
            # create rough copy 
            duikers_temp = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Select_analysis(input_duikers,duikers_temp)
            
            # Create output fields with types
            dict_fields = create_dict_fields(duikers)
            log.info('Vul output file met berekende waarden')
            log.info('duikers_temp %s' %duikers_temp)
            # Fill the dataset with the values from the dictionary
            addfieldnames(gp, duikers_temp, dict_fields)
            add_dict_values_to_fc(gp, duikers_temp, kduident, duikers, nodatavalue)
            
            # Create output file
            log.info('Opschonen output file')
    
            # When Append is used, field mapping could be applied
            if output_duikers == '#':
                log.error('Geen output feature class ingevuld. Kan waarden niet wegschrijven')
                sys.exit(1)
                        
            create_output_dataset(gp, output_duikers, dict_fields)
            gp.Append_management(duikers_temp,output_duikers, 'NO_TEST')
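            # 'NO_TEST' makes Append skip schema matching; fields missing in the
            # output would be dropped silently, which is why create_output_dataset
            # adds all fields from dict_fields first.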
            log.info('Duikers finished')
            #---------------------------------------------------------------------
            
            #-------------------------------------------------------------------------------------------------------------------------------
            # END OF SECTION DUIKERS
            # 
            #
            #---------------------------------------------------------------------
        
        if input_sifons != '#':
            #-------------------------------------------------------------------------------------------------------------------------------
            # SECTION SIFON
            # 
            #
            #---------------------------------------------------------------------
            # check whether all required datasets have been supplied:
            benodigdebestanden = [input_waterlopen_legger, peilgebieden_fc, output_sifons]
            for fc in benodigdebestanden:
                if fc == '#':
                    log.error('Bestand %s is noodzakelijk om sifons te controleren' % fc)
            # Initialise dictionary
            sifons = {}
            #---------------------------------------------------------------------
            # Join siphons with channels
            log.info('Koppel kunstwerken met watergangen')
            sifons_incl_watergangen = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Spatialjoin_analysis(input_sifons, input_waterlopen_legger, sifons_incl_watergangen)
            # Write data to dictionary
            log.info('Schrijf informatie weg naar kunstwerken')
            sifons = add_fc_values_to_dict(gp, sifons_incl_watergangen, sifons, ksyident, list_fieldnames_watergangen)
            
            #---------------------------------------------------------------------
            # Join siphons with level areas
            log.info('Koppel kunstwerken met peilgebieden')
            sifons_incl_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Spatialjoin_analysis(input_sifons, peilgebieden_fc, sifons_incl_peilgebieden)
            sifons = add_fc_values_to_dict(gp, sifons_incl_peilgebieden, sifons, ksyident, list_fieldnames_peilgebieden)
            
            # Read data from the siphons
            output_field_sifonlengte = config.get("controle_kunstwerken", "output_field_sifonlengte").lower()            
            sifons = add_fc_attribs_to_dict(gp, sifons_incl_peilgebieden, sifons, ksyident, 'Length', output_field_sifonlengte)
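            # add_fc_attribs_to_dict reads the 'Length' attribute (presumably the
            # polyline length of the siphon feature) and stores it under
            # output_field_sifonlengte in the sifons dict.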
            sifons = add_fc_values_to_dict(gp, sifons_incl_peilgebieden, sifons, ksyident, list_fieldnames_sifons)
            
            #---------------------------------------------------------------------
            # Calculations
            log.info('Start calculation')

            sifons = calculate_duikers(sifons, config, nodatavalue, treshold_value_verhang_sifons, sifon_vorm,\
                          bodemhoogte_benedenstrooms, bodemhoogte_bovenstrooms,sifon_middellijn_diam, sifonhoogte,\
                          sifonhoogte_bovenstrooms,sifonhoogte_benedenstrooms, zomerpeil, winterpeil,lengte_waterloop,\
                          sifon_lengte, output_field_sifonverhang, output_field_sifon_percentage_bodem,\
                          output_field_sifon_percentage_bovenwinterpeil, output_field_sifon_percentage_bovenzomerpeil,output_field_verhang_bodem)
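            # The same calculate_duikers helper is reused for siphons; only the field
            # names and the verhang threshold differ from the culvert call above.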
                          
            
            #log.info('dict_fields %s' %dict_fields)
            log.info('Creeer output file')
            # create rough copy 
            sifons_temp = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Select_analysis(input_sifons,sifons_temp)
            
            # Create output fields with types
            dict_fields = create_dict_fields(sifons)
            log.info('Vul output file met berekende waarden')
            log.info('sifons_temp %s' % sifons_temp)
            # Fill the dataset with the values from the dictionary
            addfieldnames(gp, sifons_temp, dict_fields)
            add_dict_values_to_fc(gp, sifons_temp, ksyident, sifons, nodatavalue)
            
            # Create output file
            log.info('Opschonen output file')
    
            # When Append is used, field mapping could be applied
            if output_sifons == '#':
                log.error('Geen output feature class ingevuld. Kan waarden niet wegschrijven')
                sys.exit(1)
                        
            create_output_dataset(gp, output_sifons, dict_fields)
            gp.Append_management(sifons_temp,output_sifons, 'NO_TEST')
            log.info('Sifons finished')
            #---------------------------------------------------------------------
            
            #-------------------------------------------------------------------------------------------------------------------------------
            # END OF SECTION SIFONS
            # 
            #
            #---------------------------------------------------------------------
        
        if input_stuwen != '#':
            #-------------------------------------------------------------------------------------------------------------------------------
            # SECTION STUWEN
            # 
            #
            #---------------------------------------------------------------------
            # check whether all required datasets have been supplied:
            benodigdebestanden = [input_waterlopen_legger, peilgebieden_fc, output_stuwen]
            for fc in benodigdebestanden:
                if fc == '#':
                    log.error('Bestand %s is noodzakelijk om stuwen te controleren' % fc)
            # Initialise dictionary
            stuwen = {}
        
            #---------------------------------------------------------------------
            # Join weirs with channels
            log.info('Koppel kunstwerken met watergangen')
            stuwen_incl_watergangen = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Spatialjoin_analysis(input_stuwen, input_waterlopen_legger,stuwen_incl_watergangen,'JOIN_ONE_TO_ONE', 'KEEP_ALL', '#', 'CLOSEST')
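            # Unlike the culvert and siphon joins above, weirs are joined one-to-one
            # to the CLOSEST channel, so a weir that does not intersect a channel
            # still receives the attributes of the nearest one.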
            # Write data to dictionary
            log.info('Schrijf informatie weg naar kunstwerken')
            stuwen = add_fc_values_to_dict(gp, stuwen_incl_watergangen, stuwen, kstident, list_fieldnames_watergangen)
            #---------------------------------------------------------------------
            # Join weirs with level areas
            log.info('Koppel kunstwerken met peilgebieden')
            stuwen_incl_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            
            gp.Spatialjoin_analysis(input_stuwen, peilgebieden_fc, stuwen_incl_peilgebieden)
            stuwen = add_fc_values_to_dict(gp, stuwen_incl_peilgebieden, stuwen, kstident, list_fieldnames_peilgebieden)
            
            # Read data from the weirs
            stuwen = add_fc_values_to_dict(gp, stuwen_incl_peilgebieden, stuwen, kstident, list_fieldnames_stuwen)
            
            #---------------------------------------------------------------------
            # Calculations
            log.info('Start calculation')
            # Read output field names
            output_field_stuw_percentage_bodem = config.get("controle_kunstwerken", "output_field_stuw_percentage_bodem").lower()
            output_field_stuw_tov_winterpeil = config.get("controle_kunstwerken", "output_field_stuw_tov_winterpeil").lower()
            output_field_stuw_tov_zomerpeil = config.get("controle_kunstwerken", "output_field_stuw_tov_zomerpeil").lower() 
            
            stuwen = calculate_stuwen(stuwen, config, nodatavalue,\
                                      bodemhoogte_benedenstrooms, bodemhoogte_bovenstrooms,\
                                      stuw_hoogte, zomerpeil, winterpeil,\
                                      output_field_stuw_percentage_bodem, output_field_stuw_tov_winterpeil,\
                                      output_field_stuw_tov_zomerpeil)
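            # calculate_stuwen (helper defined elsewhere in this module) is assumed
            # to relate the crest height of each weir to the channel bottom and to
            # the summer and winter target levels of its level area, writing the
            # results to the three output fields read above.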
        
            
            #log.info('dict_fields %s' %dict_fields)
            log.info('Creeer output file stuwen')
            # create rough copy 
            stuwen_temp = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
            gp.Select_analysis(input_stuwen, stuwen_temp)
            
            # Create output fields with types
            dict_fields = create_dict_fields(stuwen)
            log.info('Vul output file met berekende waarden')
            log.info('stuwen_temp %s' %stuwen_temp)
            # Fill the dataset with the values from the dictionary
            addfieldnames(gp, stuwen_temp, dict_fields)
            add_dict_values_to_fc(gp, stuwen_temp, kstident, stuwen, nodatavalue)
            
            # Create output file
            log.info('Opschonen output file')
    
            # When Append is used, field mapping could be applied
            if output_stuwen == '#':
                log.error('Geen output feature class ingevuld. Kan waarden niet wegschrijven')
                sys.exit(1)
            create_output_dataset(gp, output_stuwen, dict_fields, 'POINT')
            gp.Append_management(stuwen_temp,output_stuwen, 'NO_TEST')
            log.info('Stuwen finished')
        
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)
        
        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 8
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 4:
            input_calculation_points = sys.argv[1]
            input_level_area = sys.argv[2]  # level areas (peilgebieden)
            output_afv_oppervlak = sys.argv[3]
        else:
            log.error("usage: <input_calculation_points> <input_level_areas> <output_voronoi>")
            sys.exit(1)
        #----------------------------------------------------------------------------------------
        # Check input parameters
        geometry_check_list = []
        if gp.describe(input_calculation_points).ShapeType != 'Point':
            log.error("Input calculation points is not a point feature class!")
            geometry_check_list.append(input_calculation_points + " -> (Point)")

        if gp.describe(input_level_area).ShapeType != 'Polygon':
            log.error("Input level area is not a polygon feature class!")
            geometry_check_list.append(input_level_area + " -> (Polygon)")

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check required fields in database
        log.info("Check required fields in input data")
        gpgident = config.get('GENERAL', 'gpgident')
        calculation_point = config.get('rrcf_voronoi', 'calculation_point_ident')

        missing_fields = []
        if not turtlebase.arcgis.is_fieldname(gp, input_calculation_points, calculation_point):
            log.debug(" - missing: %s in %s" % (calculation_point, input_calculation_points))
            missing_fields.append("%s: %s" % (input_calculation_points, calculation_point))

        if not turtlebase.arcgis.is_fieldname(gp, input_level_area, gpgident):
            log.debug(" - missing: %s in %s" % (gpgident, input_level_area))
            missing_fields.append("%s: %s" % (input_level_area, gpgident))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Create voronoi polygons
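        # create_voronoi is assumed to build Thiessen (voronoi) polygons around the
        # calculation points and clip them to the level areas, so each output
        # polygon carries the ident of its nearest calculation point.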
        temp_voronoi = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        temp_voronoi = turtlebase.voronoi.create_voronoi(input_calculation_points, calculation_point, input_level_area, gpgident, temp_voronoi, workspace_gdb)
        gp.CopyFeatures_management(temp_voronoi, output_afv_oppervlak)

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        try:
            log.info("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)
        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        if len(sys.argv) == 5:
            input_polygon_fc = sys.argv[1]  # level areas within which the drainage areas must be found
            input_channel_fc = sys.argv[2]  # line segments for which the nearest area must be determined
            output_afvoervlakken_shp = sys.argv[3]  # shapefile with the generated drainage areas per line segment
            use_intersect_channel = sys.argv[4]  # boolean, split the channel: yes or no

        else:
            log.error("Usage: python rural_genereren_afvoervlakken.py <peilgebieden shape> <waterlijnen shape> <output shape>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        if not turtlebase.arcgis.is_file_of_type(gp, input_polygon_fc, 'Polygon'):
            log.error("%s is not a %s feature class!" % (input_polygon_fc, 'Polygon'))
            geometry_check_list.append("%s -> (%s)" % (input_polygon_fc, 'Polygon'))

        if not turtlebase.arcgis.is_file_of_type(gp, input_channel_fc, 'Polyline'):
            log.error("%s is not a %s feature class!" % (input_channel_fc, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (input_channel_fc, 'Polyline'))

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Check required fields
        log.info("check required fields in input data")
        missing_fields = []
        ovk_field = config.get('afvoervlakken', 'input_channel_ident')
        gpg_field = config.get('GENERAL', 'gpgident')

        if not turtlebase.arcgis.is_fieldname(gp, input_polygon_fc, gpg_field):
            log.error("missing field '%s' in %s" % (gpg_field, input_polygon_fc))
            missing_fields.append("%s: %s" % (input_polygon_fc, gpg_field))

        if not turtlebase.arcgis.is_fieldname(gp, input_channel_fc, ovk_field):
            log.error("missing field '%s' in %s" % (ovk_field, input_channel_fc))
            missing_fields.append("%s: %s" % (input_channel_fc, ovk_field))

        if len(missing_fields) > 0:
            log.error("missing fields: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        polygon_dict = nens.gp.get_table(gp, input_polygon_fc, primary_key=gpg_field.lower())

        #extract channels within polygon
        intersect_waterlijn = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Intersect_analysis([input_polygon_fc, input_channel_fc], intersect_waterlijn)
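        # The intersect clips the channels to the level areas and copies gpgident
        # onto each resulting segment, so segments can be selected per level area
        # in the loop below.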

        polygon_list = []
        if not os.path.isdir(os.path.join(workspace, "voronoi_work")):
            os.makedirs(os.path.join(workspace, "voronoi_work"))
        counter = 0
        for k in polygon_dict.keys():
            counter += 1
            log.info("extract polygon %s" % k)

            huidig_peilgebied_lyr = "gpg_%s" % counter
            gp.MakeFeatureLayer(input_polygon_fc, huidig_peilgebied_lyr, "%s = '%s'" % (gpg_field, k))

            log.debug("extract polylines within %s" % k)

            huidige_waterlijn_lyr = "ovk_%s" % counter
            gp.MakeFeatureLayer(intersect_waterlijn, huidige_waterlijn_lyr, "%s = '%s'" % (gpg_field, k))

            #count records
            record_count = turtlebase.arcgis.fc_records(gp, huidige_waterlijn_lyr)
            log.debug(" - record count: %s" % record_count)

            if record_count > 1:
                log.info(" - create voronoi polygons")
                point_selection = turtlebase.voronoi.create_points(gp, huidige_waterlijn_lyr, ovk_field)

                log.info(" - create line_voronoi")
                result_dict = turtlebase.voronoi.create_line_voronoi(point_selection)

                log.info(" - create polygons")
                polygon_fc = turtlebase.voronoi.create_merged_polygons(result_dict, workspace_gdb)

                log.info(" - intersect line_voronoi polygons")
                output_intersect_fc = os.path.join(workspace, "voronoi_work", "voronoi_%s.shp" % counter)

                gp.Intersect_analysis(huidig_peilgebied_lyr + ";" + polygon_fc, output_intersect_fc)

                polygon_list.append(output_intersect_fc)

            elif record_count == 1:
                log.debug(" - 1 watergang in peilgebied, opknippen dus niet nodig, kopieer gpg")
                output_spatial_join = os.path.join(workspace, "voronoi_work", "out_sj_%s.shp" % counter)

                gp.SpatialJoin_analysis(huidig_peilgebied_lyr, huidige_waterlijn_lyr, output_spatial_join)
                polygon_list.append(output_spatial_join)
            else:
                log.warning(" - geen watergang aanwezig in peilgebied, peilgebied wordt in zijn geheel meegenomen")
                polygon_list.append(huidig_peilgebied_lyr)
        #----------------------------------------------------------------------------------------
        # Merge all polygons together
        merge_str = ";".join(polygon_list)
        
        fieldmappings = gp.createobject("FieldMappings")
        fldmap_OVK_ID = gp.createobject("FieldMap")
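        # Only the channel ident (ovk_field) is carried over into the merge; inputs
        # that do not have the field (e.g. level areas without any channel) make
        # AddInputField raise and are simply skipped by the except below.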

        for fc in polygon_list:
            try:
                fldmap_OVK_ID.AddInputField(fc, ovk_field)
            except:
                pass

        fieldmappings.AddFieldMap(fldmap_OVK_ID)

        if use_intersect_channel == 'true':
            gp.Merge_management(merge_str, output_afvoervlakken_shp, fieldmappings)
        else:
            temp_merge_fc = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Merge_management(merge_str, temp_merge_fc, fieldmappings)
            gp.dissolve_management(temp_merge_fc, output_afvoervlakken_shp, ovk_field)
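        # If use_intersect_channel is 'true' the merged polygons are written directly
        # to the output; otherwise they are merged to a temporary feature class first
        # and dissolved on the channel ident, one drainage area per channel.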


        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        shutil.rmtree(os.path.join(workspace, "voronoi_work"))
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            import tempfile
            workspace = tempfile.gettempdir()
        log.info("workspace: %s" % workspace)

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                                                gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        nodig voor deze tool:
        """
        if len(sys.argv) == 5:
            input_level_area_fc = sys.argv[1]
            input_level_area_table = sys.argv[2]
            input_ahn_raster = sys.argv[3]
            output_surface_table = sys.argv[4]
        else:
            log.error("usage: <input_level_area_fc> <input_level_area_table> \
                    <input_ahn_raster> <output_surface_table>")
            sys.exit(1)

        #---------------------------------------------------------------------
        # Check geometry input parameters
        cellsize = gp.describe(input_ahn_raster).MeanCellHeight

        log.info("Check geometry of input parameters")
        geometry_check_list = []

        log.debug(" - check voronoi polygon: %s" % input_level_area_fc)
        if gp.describe(input_level_area_fc).ShapeType != 'Polygon':
            log.error("%s is not a polygon feature class!",
                      input_level_area_fc)
            geometry_check_list.append(input_level_area_fc + " -> (Polygon)")

        if gp.describe(input_ahn_raster).PixelType[0] not in ['U', 'S']:
            log.error("Input AHN is a floating point raster, \
                    for this script an integer is necessary")
            geometry_check_list.append(input_ahn_raster + " -> (Integer)")

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        # <check required fields from input data,
        # append them to list if missing>
        if not turtlebase.arcgis.is_fieldname(
                            gp, input_level_area_fc, config.get(
                            'maaiveldkarakteristiek',
                            'input_peilgebied_ident')):
            log.debug(" - missing: %s in %s" % (
                    config.get('maaiveldkarakteristiek',
                               'input_peilgebied_ident'), input_level_area_fc))
            missing_fields.append("%s: %s" % (
                    input_level_area_fc, config.get('maaiveldkarakteristiek',
                                                    'input_peilgebied_ident')))
        if not turtlebase.arcgis.is_fieldname(
                            gp, input_level_area_table, config.get(
                            'maaiveldkarakteristiek',
                            'input_peilgebied_ident')):
            log.debug(" - missing: %s in %s" % (
                    config.get('maaiveldkarakteristiek',
                               'input_peilgebied_ident'),
                    input_level_area_table))
            missing_fields.append("%s: %s" % (
                            input_level_area_table, config.get(
                                'maaiveldkarakteristiek',
                                'input_peilgebied_ident')))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments
        log.info("Set environments")
        temp_level_area = os.path.join(workspace_gdb, "peilgebieden")
        gp.select_analysis(input_level_area_fc, temp_level_area)
        # use extent from level area
        gp.extent = gp.describe(temp_level_area).extent

        #---------------------------------------------------------------------
        # create ahn ascii
        log.info("Create ascii from ahn")

        ahn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn_raster, ahn_ascii)

        #---------------------------------------------------------------------
        # Add ID Int to level area
        log.info("Create level area ascii")
        area_id_dict = add_integer_ident(gp, temp_level_area, config.get(
                    'maaiveldkarakteristiek', 'id_int').lower(),
                                         config.get('maaiveldkarakteristiek',
                                                    'input_peilgebied_ident'))

        out_raster_dataset = turtlebase.arcgis.get_random_file_name(
                                                        workspace_gdb)
        gp.FeatureToRaster_conversion(temp_level_area, config.get(
            'maaiveldkarakteristiek', 'id_int'), out_raster_dataset, cellsize)

        id_int_ascii = turtlebase.arcgis.get_random_file_name(
                            workspace, ".asc")
        log.debug("id_int_ascii: %s" % id_int_ascii)
        gp.RasterToASCII_conversion(out_raster_dataset, id_int_ascii)
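        # Both the AHN and the level-area id grid are exported to ASCII so the
        # surface-level statistics helper below can combine them cell by cell.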

        #---------------------------------------------------------------------
        log.info("Read targetlevel table")
        area_level_dict = nens.gp.get_table(
                            gp, input_level_area_table, primary_key=config.get(
                                'maaiveldkarakteristiek',
                                'input_peilgebied_ident').lower())
        target_level_dict = {}

        for k, v in area_level_dict.items():
            if k in area_id_dict:
                id_int = area_id_dict[k][config.get('maaiveldkarakteristiek',
                                                    'id_int').lower()]
                target_level_dict[id_int] = {
                            'targetlevel': v[config.get(
                            'maaiveldkarakteristiek',
                            'field_streefpeil').lower()],
                            'gpgident': k,
                                             }
        #---------------------------------------------------------------------
        log.info("create S-Curve")
        mv_procent_str = config.get('maaiveldkarakteristiek', 'mv_procent')
        field_range = mv_procent_str.split(', ')
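        # mv_procent is a comma separated list of surface-level percentages
        # (hypothetical example: "10, 50, 90"); each value presumably becomes a
        # column in the output table and a point on the S-curve.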
        #scurve_dict = turtlebase.spatial.create_scurve(ahn_ascii,
        # id_int_ascii, target_level_dict, field_range)
        scurve_dict = turtlebase.spatial.surface_level_statistics(
                            ahn_ascii, id_int_ascii,
                            target_level_dict, field_range)
        #---------------------------------------------------------------------
        log.info("Create output table")
        create_output_table(gp, output_surface_table, config.get(
            'maaiveldkarakteristiek', 'input_peilgebied_ident'), field_range)
        #---------------------------------------------------------------------
        # Add metadata
        import time
        date_time_str = time.strftime("%d %B %Y %H:%M:%S")
        source = input_ahn_raster

        for k, v in scurve_dict.items():
            scurve_dict[k]['date_time'] = date_time_str
            scurve_dict[k]['source'] = source

        #---------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(
                            output_surface_table, config.get(
                                'maaiveldkarakteristiek',
                                'input_peilgebied_ident').lower(), scurve_dict)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        tempfiles = os.listdir(workspace)
        for temp_file in tempfiles:
            if temp_file.endswith('.asc'):
                try:
                    os.remove(os.path.join(workspace, temp_file))
                except Exception, e:
                    log.debug(e)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 11
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
                log.error("failed to create a file geodatabase in %s" % workspace)
        # Input parameters
        if len(sys.argv) == 11:
            # input parameters
            input_voronoi_polygon = sys.argv[1]
            input_rrcf_waterlevel = sys.argv[2]
            input_ahn_raster = sys.argv[3]
            input_lgn_raster = sys.argv[4]
            input_lgn_conversion = sys.argv[5]

            # output parameters
            output_result_table = sys.argv[6]

            # optional output
            output_inundation = sys.argv[7]
            if output_inundation == "#":
                output_inundation = os.path.join(workspace_gdb, "inun_nbw")

            if len(os.path.basename(output_inundation)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_inundation))
                sys.exit(1)

            output_waterdamage = sys.argv[8]
            if output_waterdamage == "#":
                output_waterdamage = os.path.join(workspace_gdb, "damage_nbw")

            if len(os.path.basename(output_waterdamage)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_waterdamage))
                sys.exit(1)

            output_inundation_total = sys.argv[9]
            if len(os.path.basename(output_inundation_total)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_inundation_total))
                sys.exit(1)

            output_waterdamage_total = sys.argv[10]
            if len(os.path.basename(output_waterdamage_total)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_waterdamage_total))
                sys.exit(1)

        else:
            log.error("usage: <input_voronoi_polygon> <input_rrcf_waterlevel> <input_ahn_raster> \
            <input_lgn_raster> <input_lgn_conversion> <output_result_table> \
            <output_inundation> <output_waterdamage> <output inundation total> <output waterdamage total>")
            sys.exit(1)
        #----------------------------------------------------------------------------------------
        temp_voronoi = os.path.join(workspace_gdb, "voronoi")
        gp.select_analysis(input_voronoi_polygon, temp_voronoi)

        # Check geometry input parameters
        cellsize = gp.describe(input_ahn_raster).MeanCellHeight

        log.info("Check geometry of input parameters")
        geometry_check_list = []

        if input_lgn_conversion != "#":
            if not gp.exists(input_lgn_conversion):
                errormsg = "%s does not exist" % input_lgn_conversion
                log.error(errormsg)
                geometry_check_list.append(errormsg)

        log.debug(" - check voronoi polygon: %s" % temp_voronoi)
        if gp.describe(temp_voronoi).ShapeType != 'Polygon':
            log.error("Input voronoi is not a polygon feature class!")
            geometry_check_list.append(temp_voronoi + " -> (Polygon)")

        log.debug(" - check ahn raster %s" % input_ahn_raster)
        if gp.describe(input_ahn_raster).DataType != 'RasterDataset':
            log.error("Input AHN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_ahn_raster).PixelType[0] not in ['U', 'S']:
            log.error("Input AHN is a floating point raster, for this script an integer is nessecary")
            geometry_check_list.append(input_ahn_raster + " -> (Integer)")

        log.debug(" - check lgn raster %s" % input_lgn_raster)
        if gp.describe(input_lgn_raster).DataType != 'RasterDataset':
            log.error("Input LGN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_lgn_raster).PixelType[0] not in ['U', 'S']:
            log.error("Input LGN is a floating point raster, for this script an integer is nessecary")
            geometry_check_list.append(input_lgn_raster + " -> (Integer)")

        if gp.describe(input_lgn_raster).MeanCellHeight != float(cellsize):
            log.error("Cell size of LGN is %s, must be %s" % (
                                    gp.describe(input_lgn_raster).MeanCellHeight, cellsize))
            geometry_check_list.append(input_lgn_raster + " -> (Cellsize %s)" % cellsize)

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check required fields in database
        log.info("Check required fields in input data")
        # create return period list
        return_periods = config.get('naverwerking_rrcf', 'herhalingstijden').split(", ")
        log.debug(" - return periods: %s" % return_periods)

        missing_fields = []

        for return_period in return_periods:
            if not turtlebase.arcgis.is_fieldname(gp, input_rrcf_waterlevel, "WS_%s" % return_period):
                log.debug(" - missing: %s in %s" % ("WS_%s" % return_period, input_rrcf_waterlevel))
                missing_fields.append("%s: %s" % (input_rrcf_waterlevel, "WS_%s" % return_period))

        #<check required fields from input data, append them to list if missing>"
        field_streefpeil = config.get('naverwerking_rrcf', 'field_streefpeil')
        check_fields = {input_rrcf_waterlevel: [config.get('naverwerking_rrcf', 'calculation_point_ident'), field_streefpeil]}
        if input_lgn_conversion != "#":
            check_fields[input_lgn_conversion] = [config.get('naverwerking_rrcf', 'lgn_conv_ident'),
                                                    config.get('naverwerking_rrcf', 'input_field_k5')]
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments
        log.info("Set environments")
        gp.extent = gp.describe(temp_voronoi).extent  # use extent from the voronoi polygons

        #---------------------------------------------------------------------
        # read waterlevel table as a dictionary
        log.info("Read waterlevel table")
        waterlevel_dict = nens.gp.get_table(gp, input_rrcf_waterlevel, primary_key=config.get('naverwerking_rrcf', 'calculation_point_ident').lower())
        log.debug(waterlevel_dict)

        # Add fields to output
        for return_period in return_periods:
            if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, "WS_%s" % return_period):
                log.info(" - add field WS_%s" % return_period)
                gp.addfield(temp_voronoi, "WS_%s" % return_period, "double")

        if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, field_streefpeil):
            log.info(" - add field %s" % field_streefpeil)
            gp.addfield(temp_voronoi, field_streefpeil, "double")

        # copy waterlevel to voronoi polygons
        rows = gp.UpdateCursor(temp_voronoi)
        for row in nens.gp.gp_iterator(rows):
            row_id = row.GetValue(config.get('naverwerking_rrcf', 'calculation_point_ident'))
            if row_id in waterlevel_dict:
                log.debug(waterlevel_dict[row_id])
                for return_period in return_periods:
                    row.SetValue("WS_%s" % return_period, waterlevel_dict[row_id]['ws_%s' % return_period])
                    row.SetValue(field_streefpeil,
                                 waterlevel_dict[row_id][field_streefpeil.lower()])
                rows.UpdateRow(row)
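        # Every voronoi polygon now carries the calculated water level per return
        # period (WS_<T>) and the target level of its calculation point, so these
        # values can be rasterised below.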

        #---------------------------------------------------------------------
        # Create waterlevel rasters
        log.info("Create rasters for waterlevels")
        for return_period in return_periods:
            log.info(" - create raster for ws_%s" % return_period)
            out_raster_dataset = workspace_gdb + "/ws_%s" % return_period
            gp.FeatureToRaster_conversion(temp_voronoi, "WS_%s" % return_period, out_raster_dataset, cellsize)

        #---------------------------------------------------------------------
        # Create target level raster
        log.info("Create targetlevel raster")
        out_raster_targetlevel = os.path.join(workspace_gdb, "targetlv")
        gp.FeatureToRaster_conversion(temp_voronoi, field_streefpeil, out_raster_targetlevel, cellsize)

        #---------------------------------------------------------------------
        # Create freeboard raster
        log.info("Create freeboard raster")

        # create ahn ascii
        ahn_ascii = os.path.join(workspace, "ahn.asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn_raster, ahn_ascii)

        targetlevel_ascii = os.path.join(workspace, "targetlvl.asc")
        log.debug("targetlevel ascii: %s" % targetlevel_ascii)
        gp.RasterToASCII_conversion(out_raster_targetlevel, targetlevel_ascii)

        freeboard_ascii = os.path.join(workspace, "freeboard.asc")
        turtlebase.spatial.create_freeboard_raster(ahn_ascii, targetlevel_ascii, freeboard_ascii)
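        # The freeboard grid is assumed to hold, per cell, the AHN surface level
        # minus the target level; the damage rasters further down compare water
        # levels against this freeboard instead of against the surface level itself.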

        #----------------------------------------------------------------------------------------
        # Create K5 LGN
        log.info("Reclass LGN to K5 raster")
        lgn_ascii = os.path.join(workspace, "lgn.asc")
        lgn_k5_ascii = os.path.join(workspace, "lgn_k5.asc")

        gp.RasterToASCII_conversion(input_lgn_raster, lgn_ascii)

        if input_lgn_conversion != '#':
            reclass_dict = nens.gp.get_table(gp, input_lgn_conversion,
                                             primary_key=config.get('naverwerking_rrcf', 'lgn_conv_ident').lower())
            turtlebase.spatial.reclass_lgn_k5(lgn_ascii, lgn_k5_ascii, reclass_dict)
        else:
            turtlebase.spatial.reclass_lgn_k5(lgn_ascii, lgn_k5_ascii)

        #----------------------------------------------------------------------------------------
        # Create inundation raster
        # if water level (ws_) exceeds surface level (ahn): inundation
        inundation_raster_list = []
        inundation_total_raster_list = []

        log.info("Create inundation rasters")
        # inundation: urban
        return_period_urban = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_stedelijk')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_stedelijk') != "-":
            log.info(" - create inundation urban")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_urban)
            if gp.exists(waterlevel):
                inundation_urban = os.path.join(workspace, "inun_urban.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel, 1,
                                                            return_period_urban, inundation_urban, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_urban)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_urban = os.path.join(workspace, "inun_total_urban.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_urban, inundation_total_urban, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_urban)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # inundation: high-value agriculture
        return_period_agriculture = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_hoogwaardig')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_hoogwaardig') != "-":
            log.info(" - create inundation agriculture")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_agriculture)
            if gp.exists(waterlevel):
                # Inundation with lgn
                inundation_agriculture = os.path.join(workspace, "inun_agri.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                           2, return_period_agriculture, inundation_agriculture, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_agriculture)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_agriculture = os.path.join(workspace, "inun_total_agri.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               2, return_period_agriculture, inundation_total_agriculture, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_agriculture)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # inundation: arable land
        return_period_rural = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_akker')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_akker') != "-":
            log.info(" - create inundation rural")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_rural)
            if gp.exists(waterlevel):
                inundation_rural = os.path.join(workspace, "inun_rural.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                           3, return_period_rural, inundation_rural, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_rural)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_rural = os.path.join(workspace, "inun_total_rural.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               3, return_period_rural, inundation_total_rural, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_rural)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # inundation: grassland
        return_period_grass = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_grasland')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_grasland') != "-":
            log.info(" - create inundation grass")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_grass)
            if gp.exists(waterlevel):
                log.debug("waterlevel grasland = %s" % waterlevel)
                inundation_grass = os.path.join(workspace, "inun_grass.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                           4, return_period_grass, inundation_grass, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_grass)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_grass = os.path.join(workspace, "inun_total_grass.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               4, return_period_grass, inundation_total_grass, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_grass)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        if len(inundation_raster_list) > 1:
            log.info("Merge inundation rasters")
            output_inundation_exists = turtlebase.spatial.merge_ascii(inundation_raster_list, output_inundation, workspace)
        else:
            log.error("there are no inundation rasters available")

        if len(inundation_total_raster_list) > 1:
            log.info("Merge inundation total rasters")
            turtlebase.spatial.merge_ascii(inundation_total_raster_list, output_inundation_total, workspace)

        #----------------------------------------------------------------------------------------
        # Create waterdamage raster
        # if water level (ws_) exceeds freeboard: water damage
        damage_raster_list = []
        damage_total_raster_list = []

        log.info("Create waterdamage rasters")
        # water damage: urban
        return_period_urban_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_stedelijk')
        if config.get('naverwerking_rrcf', 'percentage_overlast_stedelijk') != "-":
            log.info(" - create waterdamage urban")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_urban_damage)
            if gp.exists(waterlevel):
                damage_urban = os.path.join(workspace, "damage_urban.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           1, return_period_urban_damage, damage_urban, workspace, use_lgn=True)
                damage_raster_list.append(damage_urban)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_urban = os.path.join(workspace, "damage_total_urban.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_urban_damage, damage_total_urban, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_urban)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # water damage: high-value agriculture
        return_period_agriculture_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_hoogwaardig')
        if config.get('naverwerking_rrcf', 'percentage_overlast_hoogwaardig') != "-":
            log.info(" - create waterdamage agriculture")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_agriculture_damage)
            if gp.exists(waterlevel):
                damage_agriculture = workspace + "/damage_agri_%s.asc" % return_period_agriculture_damage
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           2, return_period_agriculture_damage, damage_agriculture, workspace, use_lgn=True)
                damage_raster_list.append(damage_agriculture)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_agriculture = os.path.join(workspace, "damage_total_agri.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_agriculture_damage, damage_total_agriculture, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_agriculture)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # waterdamage: arable land (overlast akker)
        return_period_rural_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_akker')
        if config.get('naverwerking_rrcf', 'percentage_overlast_akker') != "-":
            log.info(" - create waterdamage rural")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_rural_damage)
            if gp.exists(waterlevel):
                damage_rural = workspace + "/damage_rural_%s.asc" % return_period_rural_damage
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           3, return_period_rural_damage, damage_rural, workspace, use_lgn=True)
                damage_raster_list.append(damage_rural)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_rural = os.path.join(workspace, "damage_total_rural.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_rural_damage, damage_total_rural, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_rural)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # waterdamage: grassland (overlast grasland)
        return_period_grass_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_grasland')
        if config.get('naverwerking_rrcf', 'percentage_overlast_grasland') != "-":
            log.info(" - create waterdamage grass")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_grass_damage)
            if gp.exists(waterlevel):
                damage_grass = os.path.join(workspace, "damage_grass.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           4, return_period_grass_damage, damage_grass, workspace, use_lgn=True)
                damage_raster_list.append(damage_grass)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_grass = os.path.join(workspace, "damage_total_grass.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_grass_damage, damage_total_grass, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_grass)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # Merge waterdamage rasters
        if len(damage_raster_list) > 0:
            log.info("Merge waterdamage rasters")
            output_waterdamage_exists = turtlebase.spatial.merge_ascii(damage_raster_list, output_waterdamage, workspace)
        else:
            output_waterdamage_exists = 1  # non-zero: no merged waterdamage raster was written
            log.error("there are no waterdamage rasters available")

        if len(damage_total_raster_list) > 0:
            log.info("Merge waterdamage total rasters")
            turtlebase.spatial.merge_ascii(damage_total_raster_list, output_waterdamage_total, workspace)
        #----------------------------------------------------------------------------------------
        # calculate percentage inundation
        """
        input:
        - inundatie / overlast (raster dataset)
        - input_voronoi_polygon (met GPGIDENT) (feature class)
        - lgn_k5 (raster dataset)
        """
        gpgident_field = config.get('General', 'gpgident')
        # dissolve voronoi based on gpgident

        log.debug("dissolve voronoi polygons, based on gpgident")
        temp_fc_gpgident = os.path.join(workspace_gdb, "temp_fc_gpgident")
        gp.Dissolve_management(temp_voronoi, temp_fc_gpgident, gpgident_field)

        # Calculate area total, gpgident
        if not turtlebase.arcgis.is_fieldname(gp, temp_fc_gpgident, "area_total"):
            gp.addfield(temp_fc_gpgident, "area_total", "Double")
        turtlebase.arcgis.calculate_area(gp, temp_fc_gpgident, "area_total")

        gpgident_dict = nens.gp.get_table(gp, temp_fc_gpgident, primary_key=gpgident_field.lower())
        log.debug("gpgident_dict: %s" % gpgident_dict)

        # create feature class from lgn k5 ascii
        output_reclass_lgn = os.path.join(workspace_gdb, "reclass_lgn")
        gp.ASCIIToRaster_conversion(lgn_k5_ascii, output_reclass_lgn)
        temp_fc_lgn = os.path.join(workspace_gdb, "fc_lgn")
        gp.RasterToPolygon_conversion(output_reclass_lgn, temp_fc_lgn, "NO_SIMPLIFY")

        # union lgn with gpg-areas
        temp_fc_union_lgn = os.path.join(workspace_gdb, "fc_union_lgn")
        gp.Union_analysis(temp_fc_gpgident + ";" + temp_fc_lgn, temp_fc_union_lgn)
        dissolve_lyr = turtlebase.arcgis.get_random_layer_name()
        gp.MakeFeatureLayer_management(temp_fc_union_lgn, dissolve_lyr, "%s <> ''" % gpgident_field)
        temp_fc_dissolve_lgn = os.path.join(workspace_gdb, "dissolve_lgn")
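        # RasterToPolygon_conversion names the value field GRIDCODE or
        # grid_code depending on the ArcGIS version and the output workspace,
        # so both spellings are checked here (assumption based on observed
        # ArcGIS behaviour).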
        if turtlebase.arcgis.is_fieldname(gp, dissolve_lyr, "GRIDCODE"):
            gp.Dissolve_management(dissolve_lyr, temp_fc_dissolve_lgn, "%s; GRIDCODE" % gpgident_field)
            gridcode = "gridcode"
        elif turtlebase.arcgis.is_fieldname(gp, dissolve_lyr, "grid_code"):
            gp.Dissolve_management(dissolve_lyr, temp_fc_dissolve_lgn, "%s; grid_code" % gpgident_field)
            gridcode = "grid_code"
        else:
            log.error("no field GRIDCODE or grid_code available in %s" % dissolve_lyr)
            sys.exit(2)

        # Calculate area lgn
        if not turtlebase.arcgis.is_fieldname(gp, temp_fc_dissolve_lgn, "area_lgn"):
            gp.addfield(temp_fc_dissolve_lgn, "area_lgn", "Double")
        turtlebase.arcgis.calculate_area(gp, temp_fc_dissolve_lgn, "area_lgn")

        lgn_dict = nens.gp.get_table(gp, temp_fc_dissolve_lgn)
        translate_lgn_dict = translate_dict(lgn_dict, gridcode, 'area_lgn')
        log.debug("translate_lgn_dict: %s" % translate_lgn_dict)

        # Create feature class from inundation_grid
        """ values: 10, 25, 50, 100"""
        if output_inundation_exists == 0:
            temp_fc_inundation = os.path.join(workspace_gdb, "inundation")
            log.info(output_inundation)
            gp.RasterToPolygon_conversion(output_inundation, temp_fc_inundation, "NO_SIMPLIFY")
            temp_fc_union_inundation = os.path.join(workspace_gdb, "union_inun")
            gp.Union_analysis(temp_fc_dissolve_lgn + ";" + temp_fc_inundation, temp_fc_union_inundation)
            dissolve_inundation_lyr = turtlebase.arcgis.get_random_layer_name()
            if turtlebase.arcgis.is_fieldname(gp, temp_fc_union_inundation, "GRIDCODE_1"):
                gp.MakeFeatureLayer_management(temp_fc_union_inundation, dissolve_inundation_lyr, "GRIDCODE_1 > 0")
                gridcode_1 = "gridcode_1"
            elif turtlebase.arcgis.is_fieldname(gp, temp_fc_union_inundation, "GRID_CODE1"):
                gp.MakeFeatureLayer_management(temp_fc_union_inundation, dissolve_inundation_lyr, "GRID_CODE1 > 0")
                gridcode_1 = "grid_code1"
            elif turtlebase.arcgis.is_fieldname(gp, temp_fc_union_inundation, "GRID_CODE_1"):
                gp.MakeFeatureLayer_management(temp_fc_union_inundation, dissolve_inundation_lyr, "GRID_CODE_1 > 0")
                gridcode_1 = "grid_code_1"
            else:
                log.error("No field available named gridcode_1 or grid_code1")
                log.warning(nens.gp.get_table_def(gp, temp_fc_union_inundation))
                sys.exit(1)
            temp_fc_dissolve_inundation = os.path.join(workspace_gdb, "dissolve_inun")
            dissolve_string = "%s;%s;%s" % (gpgident_field.upper(), gridcode, gridcode_1)
            log.debug(" - dissolve layer: %s" % dissolve_inundation_lyr)
            gp.Dissolve_management(dissolve_inundation_lyr, temp_fc_dissolve_inundation, dissolve_string)

            # Calculate area inundation
            if not turtlebase.arcgis.is_fieldname(gp, temp_fc_dissolve_inundation, "area_inun"):
                gp.addfield(temp_fc_dissolve_inundation, "area_inun", "Double")
            turtlebase.arcgis.calculate_area(gp, temp_fc_dissolve_inundation, "area_inun")

            inundation_dict = nens.gp.get_table(gp, temp_fc_dissolve_inundation)
            translate_inundation_dict = translate_dict(inundation_dict, gridcode_1, 'area_inun')
            log.debug("translate_inundation_dict: %s" % translate_inundation_dict)
        else:
            translate_inundation_dict = {}

        # Create feature class from waterdamage grid
        """ values: 10, 15, 25"""
        if output_waterdamage_exists == 0:
            try:
                temp_fc_waterdamage = os.path.join(workspace_gdb, "damage")
                gp.RasterToPolygon_conversion(output_waterdamage, temp_fc_waterdamage, "NO_SIMPLIFY")
                waterdamage = True
            except:
                log.warning("waterdamage raster is empty")
                waterdamage = False

            if waterdamage:
                temp_fc_union_waterdamage = os.path.join(workspace_gdb, "damage_union")
                gp.Union_analysis(temp_fc_dissolve_lgn + ";" + temp_fc_waterdamage, temp_fc_union_waterdamage)

                dissolve_waterdamage_lyr = turtlebase.arcgis.get_random_layer_name()
                gp.MakeFeatureLayer_management(temp_fc_union_waterdamage, dissolve_waterdamage_lyr, "%s > 0" % gridcode_1)

                temp_fc_dissolve_waterdamage = os.path.join(workspace_gdb, "dissolve_damage")
                gp.Dissolve_management(dissolve_waterdamage_lyr, temp_fc_dissolve_waterdamage, "%s; %s; %s" % (gpgident_field, gridcode, gridcode_1))

                # Calculate area waterdamage
                if not turtlebase.arcgis.is_fieldname(gp, temp_fc_dissolve_waterdamage, "area_damag"):
                    gp.addfield(temp_fc_dissolve_waterdamage, "area_damag", "Double")
                turtlebase.arcgis.calculate_area(gp, temp_fc_dissolve_waterdamage, "area_damag")

                waterdamage_dict = nens.gp.get_table(gp, temp_fc_dissolve_waterdamage)
                translate_waterdamage_dict = translate_dict(waterdamage_dict, gridcode_1, 'area_damag')
                log.debug("translate_waterdamage_dict: %s" % translate_waterdamage_dict)
            else:
                translate_waterdamage_dict = {}
        else:
            translate_waterdamage_dict = {}

        no_data_value = float(config.get('naverwerking_rrcf', 'no_data_value'))
        result_dict = {}
        log.info("Calculating results")
        for gpgident, fields in gpgident_dict.items():
            # area_total
            #area_total = fields['area_total']

            #set defaults
            percentage_inundation_urban = no_data_value
            percentage_inundation_agriculture = no_data_value
            percentage_inundation_rural = no_data_value
            percentage_inundation_grass = no_data_value
            toetsing_inundation_urban = 9
            toetsing_inundation_agriculture = 9
            toetsing_inundation_rural = 9
            toetsing_inundation_grass = 9

            percentage_waterdamage_urban = no_data_value
            percentage_waterdamage_agriculture = no_data_value
            percentage_waterdamage_rural = no_data_value
            percentage_waterdamage_grass = no_data_value
            toetsing_waterdamage_urban = 9
            toetsing_waterdamage_agriculture = 9
            toetsing_waterdamage_rural = 9
            toetsing_waterdamage_grass = 9
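
            # These defaults remain when a peilgebied has no inundation or
            # waterdamage data: the percentages stay at the configured
            # no_data_value and the toetsing codes stay 9 (not assessed).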

            if gpgident in translate_inundation_dict:
                log.debug("Calculate percentage inundation for %s" % gpgident)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_stedelijk')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_stedelijk')
                toetsing_inundation_urban, percentage_inundation_urban = calculate_toetsing(translate_inundation_dict,
                                                                                            gpgident, 1, translate_lgn_dict,
                                                                                            hhtijd, toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_hoogwaardig')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_hoogwaardig')
                toetsing_inundation_agriculture, percentage_inundation_agriculture = calculate_toetsing(translate_inundation_dict,
                                                                                                        gpgident, 2, translate_lgn_dict,
                                                                                                        hhtijd, toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_akker')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_akker')
                toetsing_inundation_rural, percentage_inundation_rural = calculate_toetsing(translate_inundation_dict, gpgident,
                                                               3, translate_lgn_dict, hhtijd,
                                                               toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_grasland')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_grasland')
                toetsing_inundation_grass, percentage_inundation_grass = calculate_toetsing(translate_inundation_dict, gpgident,
                                                               4, translate_lgn_dict, hhtijd,
                                                               toetsing_perc, no_data_value)

            if gpgident in translate_waterdamage_dict:
                log.debug("Calculate percentage waterdamage for %s" % gpgident)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_stedelijk')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_stedelijk')
                toetsing_waterdamage_urban, percentage_waterdamage_urban = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                              1, translate_lgn_dict, hhtijd,
                                                                                              toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_hoogwaardig')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_hoogwaardig')
                toetsing_waterdamage_agriculture, percentage_waterdamage_agriculture = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                                          2, translate_lgn_dict, hhtijd,
                                                                                                          toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_akker')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_akker')
                toetsing_waterdamage_rural, percentage_waterdamage_rural = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                              3, translate_lgn_dict, hhtijd,
                                                                                              toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_grasland')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_grasland')
                toetsing_waterdamage_grass, percentage_waterdamage_grass = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                              4, translate_lgn_dict, hhtijd,
                                                                                              toetsing_perc, no_data_value)
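
                # calculate_toetsing is defined elsewhere in this script. Based
                # only on how it is called here, it presumably divides the
                # affected area of the given LGN class by the total class area
                # from translate_lgn_dict and tests the percentage against the
                # configured threshold. A rough, hypothetical sketch (names,
                # dict structure and return values are assumptions, not the
                # actual implementation):
                #
                #   def calculate_toetsing(area_dict, gpgident, lgn_class,
                #                          lgn_dict, hhtijd, toetsing_perc,
                #                          no_data_value):
                #       area = area_dict.get(gpgident, {}).get(lgn_class)
                #       total = lgn_dict.get(gpgident, {}).get(lgn_class)
                #       if not area or not total:
                #           return 9, no_data_value
                #       percentage = 100.0 * float(area) / float(total)
                #       return int(percentage > float(toetsing_perc)), percentage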

            result_dict[gpgident] = {
                    gpgident_field: gpgident,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_stedelijk'):
                                     percentage_inundation_urban,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_hoogwaardig'):
                                     percentage_inundation_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_akker'):
                                     percentage_inundation_rural,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_grasland'):
                                      percentage_inundation_grass,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_stedelijk'):
                                      percentage_waterdamage_urban,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_hoogwaardig'):
                                      percentage_waterdamage_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_akker'):
                                      percentage_waterdamage_rural,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_grasland'):
                                     percentage_waterdamage_grass,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_stedelijk'):
                                     toetsing_inundation_urban,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_hoogwaardig'):
                                      toetsing_inundation_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_akker'):
                                     toetsing_inundation_rural,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_grasland'):
                                     toetsing_inundation_grass,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_stedelijk'):
                                     toetsing_waterdamage_urban,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_hoogwaardig'):
                                     toetsing_waterdamage_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_akker'):
                                     toetsing_waterdamage_rural,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_grasland'):
                                     toetsing_waterdamage_grass,
                                     }
        #---------------------------------------------------------------------
        # Create output table
        if not gp.exists(output_result_table):
            log.info("Create new output table")
            temp_result_table = os.path.join(workspace_gdb, "result_table")
            gp.CreateTable_management(os.path.dirname(temp_result_table), os.path.basename(temp_result_table))
            copy_table = True
        else:
            temp_result_table = output_result_table
            copy_table = False

        fields_to_add = [config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_akker'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_grasland'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_akker'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_grasland'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_akker'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_grasland'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_akker'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_grasland')]

        if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, gpgident_field):
            log.debug(" - add field %s to %s" % (gpgident_field, temp_result_table))
            gp.addfield_management(temp_result_table, gpgident_field, 'text')

        for field in fields_to_add:
            if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, field):
                log.debug(" - add field %s to %s" % (field, temp_result_table))
                gp.addfield_management(temp_result_table, field, 'double')

        #----------------------------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(temp_result_table, gpgident_field.lower(), result_dict)

        if copy_table == True:
            gp.TableToTable_conversion(temp_result_table, os.path.dirname(output_result_table), os.path.basename(output_result_table))

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        temp_files = os.listdir(workspace)
        for temp_filename in temp_files:
            if temp_filename.endswith('.asc'):
                try:
                    os.remove(os.path.join(workspace, temp_filename))
                except Exception, e:
                    log.debug(e)
                    
        mainutils.log_footer()
Ejemplo n.º 12
0
import logging
import sys
import os
import traceback
import tempfile

from turtlebase.logutils import LoggingConfig
from turtlebase import mainutils
import nens.gp
import turtlebase.arcgis

log = logging.getLogger(__name__)

gp = mainutils.create_geoprocessor()
config = mainutils.read_config(__file__, 'turtle-settings.ini')
logfile = mainutils.log_filename(config)
logging_config = LoggingConfig(gp, logfile=logfile)
mainutils.log_header(__name__)


gpg_cluster = config.get('afvoerendoppervlak', 'gpg_cluster')
gpg_source = config.get('afvoerendoppervlak', 'gpg_source')
gpg_date = config.get('afvoerendoppervlak', 'gpg_date')
gpg_opp = config.get('afvoerendoppervlak', 'gpg_opp').lower()
gpgident = config.get('GENERAL', 'gpgident').lower()
kwkident = config.get('GENERAL', 'kwkident').lower()
kwk_cap = config.get('afvoerendoppervlak', 'kwk_cap').lower()
kwk_cap_h = config.get('afvoerendoppervlak', 'kwk_cap_h').lower()
boundary_str = config.get('afvoerendoppervlak', 'boundary_str')
afvoer_van = config.get('afvoerendoppervlak', 'afvoer_van').lower()
afvoer_naar = config.get('afvoerendoppervlak', 'afvoer_naar').lower()
Ejemplo n.º 13
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        #use_onderbemalingen = False
        if len(sys.argv) == 6:
            input_peilgebiedgegevens = sys.argv[1]
            input_toetspunten = sys.argv[2]
            input_resultaten = sys.argv[3]
            output_table = sys.argv[4]
            output_csv = sys.argv[5]
            use_csv = not(output_csv == '#')
        else:
            log.error("Usage: python rural_naverwerking.py <peilvakgegevens table> <toetspunten_table> <resultaten_csv> <output_table> <output_csv>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        #check input parameters
        log.info('Checking presence of input files... ')
        if not(use_csv):
            log.warning("no output has been defined, output will be written to temp workspace")
        if not(gp.exists(input_toetspunten)):
            log.error("input_toetspunten "+input_toetspunten+" does not exist!")
            sys.exit(5)
        if not(gp.exists(input_resultaten)):
            log.error("input_resultaten "+input_resultaten+" does not exist!")
            sys.exit(5)

        log.info('input parameters checked... ')

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL','location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        log.info("A-1) Reading peilgebiedgegevens... ")
        gpgident = config.get('GENERAL', 'gpgident')
        peilgebied_dict = nens.gp.get_table(gp, input_peilgebiedgegevens, primary_key=gpgident.lower())

        log.info("A-2) Converting toetspunten to csv")
        toetspunten_csv = os.path.join(workspace, "nav_toets.csv")
        nav_toetspunten = nens.gp.join_on_primary_key(gp, peilgebied_dict, input_toetspunten, gpgident.lower())

        turtlebase.arcgis.convert_dict_to_csv(nav_toetspunten, toetspunten_csv)

        log.info("A-3) Preparing hymstat csv")
        hymstat_csv = os.path.join(workspace, "nav_hym.csv")
        #turtlebase.arcgis.convert_table_to_csv(gp, input_resultaten, hymstat_csv)
        shutil.copyfile(input_resultaten, hymstat_csv)

        #prepare naverwerking ini file
        log.info("B-1) Reading naverwerkingstool.ini... ")
        location_script = os.path.dirname(sys.argv[0])
        nav_config = mainutils.read_config(__file__, config.get('GENERAL', 'filename_naverwerking_ini'))
        configfilename = os.path.join(location_script, config.get('GENERAL', 'filename_naverwerking_ini'))

        nav_config.set('GENERAL', 'CSVTOETSPUNTEN', toetspunten_csv) #input_toetspunten
        nav_config.set('GENERAL', 'CSVINPUT1', hymstat_csv)

        # image output of the naverwerkingstool goes to the same output dir as the csv,
        # so if csv output is selected we MUST use that output csv as the intermediate csv too
        if use_csv:
            log.info(" - using csv")
            if not output_csv.endswith('.csv'):
                output_csv += '.csv'
            nav_config.set('GENERAL', 'CSVOUTPUT1', output_csv)
        else:
            log.info(" - not using csv")
            output_csv = os.path.join(workspace, "nav_output.csv")
            nav_config.set('GENERAL', 'CSVOUTPUT1', output_csv)

        nav_config.set('GENERAL', 'CSVINPUT2', '')
        nav_config.set('GENERAL', 'CSVOUTPUT2', '')
        configfile = open(configfilename, "wb")
        nav_config.write(configfile)
        configfile.close()
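
        # The naverwerking ini is rewritten on disk so that the external
        # naverwerking executable (started below) picks up the prepared
        # toetspunten/hymstat csv locations and writes its output csv to the
        # chosen location.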

        #----------------------------------------------------------------------------------------
        #call naverwerkingstool
        arguments = ""

        #change working path to exe directory
        os.chdir(location_script)

        #execute external program gridbewerking
        log.info("Naverwerking calculation")

        import subprocess
        naverwerking_exe = config.get('GENERAL', 'filename_naverwerking_exe')
        child = subprocess.Popen(os.path.join(location_script, naverwerking_exe) + arguments)
        child.wait()
        log.info("naverwerking.exe succesfully executed")

        """
        HIERONDER ALLES HERSCHRIJVEN
        """
        #----------------------------------------------------------------------------------------
        #post: write to database, table and/or csv
        log.info("C-1) Reading output csv")
        data_set = csv.DictReader(file(output_csv))

        # name is the same as the key if nothing is given; the key is the column name from the csv
        # all fields not listed here keep their name and are of type "long", precision 10, scale 5
        naverwerkingFields = {\
            gpgident: {"NAME": gpgident, "TYPE": "TEXT", "PRECISION": "10", "SCALE": "5", "LENGTH": "50"},\
            "X0": {"NAME": "X0", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "B": {"NAME": "B", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_2": {"NAME": "WS_2", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_5": {"NAME": "WS_5", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_10": {"NAME": "WS_10", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_15": {"NAME": "WS_15", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_20": {"NAME": "WS_20", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_25": {"NAME": "WS_25", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_50": {"NAME": "WS_50", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_100": {"NAME": "WS_100", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_S": {"NAME": "STA_TP_I_ST", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_H": {"NAME": "STA_TP_I_HL", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_A": {"NAME": "STA_TP_I_AK", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_G": {"NAME": "STA_TP_I_GR", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_S": {"NAME": "STA_TP_O_ST", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_H": {"NAME": "STA_TP_O_HL", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_A": {"NAME": "STA_TP_O_AK", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_G": {"NAME": "STA_TP_O_GR", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "T_I": {"NAME": "T_I", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "T_O": {"NAME": "T_O", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "RSLT_Bron": {"NAME": "RSLT_Bron", "TYPE": "TEXT", "LENGTH": "50", "PRECISION": "10", "SCALE": "5"},\
            "RSLT_Datum": {"NAME": "RSLT_Datum", "TYPE": "DATE", "PRECISION": "10", "SCALE": "5"},\
            }

        #convert columnnames in data_set
        data_set_converted = {}
        source_str = "hymstat: %s" % os.path.basename(input_resultaten)
        if len(source_str) > 50:
            source_str = source_str[:50]
        import time
        date_str = time.strftime('%x')
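
        # Re-key every csv row on its PEILVAKID, rename the columns according
        # to naverwerkingFields and tag each record with the hymstat source
        # file and the run date.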

        for row in data_set:
            peilgebied_id = row['PEILVAKID']
            data_set_converted[peilgebied_id] = {gpgident: peilgebied_id}
            for key in row.keys():
                if key in naverwerkingFields:
                    data_set_converted[peilgebied_id][naverwerkingFields[key]["NAME"]] = row[key]

            data_set_converted[peilgebied_id]["RSLT_Bron"] = source_str
            data_set_converted[peilgebied_id]["RSLT_Datum"] = date_str

        #----------------------------------------------------------------------------------------
        #check if output_table exists. if not, create with correct rows
        log.info("C-2) Checking output table... ")
        if not(gp.exists(output_table)):
            gp.CreateTable(os.path.dirname(output_table), os.path.basename(output_table))

        #----------------------------------------------------------------------------------------
        #for key,row in naverwerkingFields.items():
        #	print row["NAME"]+" "+row["TYPE"]+" "+row["PRECISION"]+" "+row["SCALE"]
        #check if output_table has the correct rows
        log.info("C-3) Checking fields")
        for field_name, field_settings in naverwerkingFields.items():
            if not turtlebase.arcgis.is_fieldname(gp, output_table, field_settings['NAME']):
                if field_settings['TYPE'] == 'DOUBLE':
                    gp.AddField(output_table, field_settings['NAME'], field_settings['TYPE'], field_settings['PRECISION'], field_settings['SCALE'])
                elif field_settings['TYPE'] == 'TEXT':
                    gp.AddField(output_table, field_settings['NAME'], field_settings['TYPE'], '#', '#', field_settings['LENGTH'])
                else:
                    gp.AddField(output_table, field_settings['NAME'], field_settings['TYPE'], field_settings['PRECISION'], field_settings['SCALE'])

        # ---------------------------------------------------------------------------
        #add data to file_output
        turtlebase.arcgis.write_result_to_output(output_table, gpgident, data_set_converted)

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        if os.path.isfile(toetspunten_csv):
            os.remove(toetspunten_csv)
        if os.path.isfile(hymstat_csv):
            os.remove(hymstat_csv)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 14
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        nodig voor deze tool:
        """
        tempfiles = []
        if len(sys.argv) == 6:
            input_hydrovak = sys.argv[1]
            optional_area = sys.argv[2][:10]
            output_shapefile = sys.argv[3]
            optional_bottleneck_points = sys.argv[4]
            optional_terminal_points = sys.argv[5]            
        else:
            log.warning("usage: <input_hydrovak> <output_shapefile> <optional_bottleneck_points> <optional_terminal_points> <optional_area>")
            sys.exit(1)

        tolerance_points = float(config.get('netwerkanalyse', 'tolerance_points'))
        input_shapefile = turtlebase.arcgis.get_random_file_name(workspace , ".shp")
        tempfiles.append(input_shapefile)
        gp.select_analysis(input_hydrovak, input_shapefile)

        #---------------------------------------------------------------------
        # Check required fields in input data
        ovk_field = config.get('general', 'ovkident')

        if not turtlebase.arcgis.is_fieldname(gp, input_shapefile, ovk_field):
            errormsg = "fieldname %s not available in %s" % (
                                    ovk_field, input_shapefile)
            log.error(errormsg)
        #---------------------------------------------------------------------
        # add from and to coordinates
        update_to_and_from_coordinates(gp, input_shapefile, ovk_field)

        network_data = read_table(gp, config, input_shapefile, optional_area)
        
        g = turtlebase.network.import_dbf_into_graph(config, network_data,
                                                     tolerance_points, optional_area)
        turtlebase.network.let_it_stream(g)
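
        # let_it_stream (turtlebase.network) presumably performs the actual
        # network traversal on graph g, determining per segment the number of
        # incoming branches, whether it was examined and whether it is a
        # terminal; those attributes are written back to the shapefile below.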

        #create output:
        fields_to_add = [('incoming', 'SHORT'),
                         ('examined', 'SHORT'),
                         ('terminal', 'SHORT'),
                         ('som_oppvl', 'DOUBLE'),
                         ('bottleneck', 'SHORT'),
                         ('flip', 'SHORT')]
        gp.select_analysis(input_shapefile, output_shapefile)

        #fields_to_add = {'incoming':"SHORT",'examined':"SHORT",'terminal':"SHORT", 'cum_urban':"DOUBLE", 'cum_rural':"DOUBLE", 'bottleneck':"SHORT", 'flip':"SHORT"} #'ovkident':"TEXT",
        for field_name, field_type in fields_to_add:
            if turtlebase.arcgis.is_fieldname(gp, output_shapefile, field_name):
                gp.DeleteField_management(output_shapefile, field_name)
            gp.AddField_management(output_shapefile, field_name, field_type)
            log.info("Adding field %s" % field_name)

        turtlebase.network.save_result_shapefile(gp, config, g, output_shapefile)

        log.info("Recognizing bottlenecks")
        log.debug("create field to store bottlenecks")

        row = gp.UpdateCursor(output_shapefile)
        for item in nens.gp.gp_iterator(row):
            examined = item.getValue(config.get('netwerkanalyse', 'examined'))
            incoming = item.getValue(config.get('netwerkanalyse', 'incoming'))
            terminal = item.getValue(config.get('netwerkanalyse', 'terminal'))

            if (terminal == 1 or incoming > 1) and examined == 0:
                item.SetValue(config.get('netwerkanalyse', 'bottleneck'), incoming)
            row.UpdateRow(item)

        # if the user has asked for the bottleneck points as a point file, the
        # start x and y coordinates are first stored in a dictionary, from
        # which a point file is created by create_point_file_from_polyline
        if optional_bottleneck_points != '#':
            temp_shape = turtlebase.arcgis.get_random_file_name(workspace , ".shp")
            tempfiles.append(temp_shape)
            log.info("Creating bottleneck points file")
            create_point_file_from_polyline(gp, config, output_shapefile, temp_shape, 'bottlenecks')
            gp.Select_analysis(temp_shape, optional_bottleneck_points)
        # if the user has asked for the terminal points as a point file, the
        # same applies: store the start x and y in a dictionary and create a
        # point file from it

        if optional_terminal_points != "#":
            temp_shape2 = turtlebase.arcgis.get_random_file_name(workspace , ".shp")
            tempfiles.append(temp_shape2)
            log.info("Creating terminal points file")
            create_point_file_from_polyline(gp, config, output_shapefile, temp_shape2, 'terminals')
            gp.Select_analysis(temp_shape2, optional_terminal_points)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            turtlebase.arcgis.remove_tempfiles(gp, log, tempfiles)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 15
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters")
        if len(sys.argv) == 5:
            input_peilgebieden_feature = sys.argv[1]
            input_kunstwerken_feature = sys.argv[2]
            input_afvoer_table = sys.argv[3]
            output_feature = sys.argv[4]
        else:
            log.error("Usage: python rural_afvoerrelaties.py \
            <peilgebieden feature> <kunstwerken feature> \
            <afvoerrelaties table> <output feature>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        #check input parameters
        gpgident = config.get('GENERAL', 'gpgident').lower()
        kwkident = config.get('GENERAL', 'kwkident').lower()

        log.info('Checking presence of input files')
        if not(gp.exists(input_peilgebieden_feature)):
            log.error("inputfile peilgebieden %s does not exist!" % input_peilgebieden_feature)
            sys.exit(5)

        if not(gp.exists(input_afvoer_table)):
            log.error("inputfile afvoerrelaties %s does not exist!" % input_afvoer_table)
            sys.exit(5)

        log.info('Input parameters checked')
        #----------------------------------------------------------------------------------------
        log.info("Prepare input_peilgebieden_feature")
        temp_peilgebieden_feature = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Select_analysis(input_peilgebieden_feature, temp_peilgebieden_feature)

        add_centroids(gp, temp_peilgebieden_feature)
        peilgebieden_dict = nens.gp.get_table(gp, temp_peilgebieden_feature, primary_key=gpgident)

        if input_kunstwerken_feature != '#':
            log.info("Prepare input_kunstwerken_feature")
            temp_kunstwerken_feature = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Select_analysis(input_kunstwerken_feature, temp_kunstwerken_feature)

            gp.addxy(temp_kunstwerken_feature)
            kunstwerken_dict = nens.gp.get_table(gp, temp_kunstwerken_feature, primary_key=kwkident)
        else:
            kunstwerken_dict = {}

        log.info("Reading input_afvoer_table")
        relaties_dict = nens.gp.get_table(gp, input_afvoer_table, primary_key=kwkident)

        log.info("Calculating afvoerrelaties")
        afvoer_van = config.get('afvoerrelaties', 'input_peilg_from').lower()
        afvoer_naar = config.get('afvoerrelaties', 'input_peilg_to').lower()

        output_relations = {}
        data_source = "pg: %s, kw: %s, rel: %s" % (os.path.basename(input_peilgebieden_feature),
                                                   os.path.basename(input_kunstwerken_feature),
                                                   os.path.basename(input_afvoer_table))
        data_source = data_source[:50]

        for relation, attributes in relaties_dict.items():
            id_from = attributes[afvoer_van]
            id_to = attributes[afvoer_naar]
            item_id = "%s_%s" % (id_from, id_to)
            coords = []
            # get start coords
            x1 = peilgebieden_dict[id_from]['point_x']
            y1 = peilgebieden_dict[id_from]['point_y']
            coords.append((x1, y1))

            if relation in kunstwerken_dict:
                x2 = kunstwerken_dict[relation]['point_x']
                y2 = kunstwerken_dict[relation]['point_y']
                coords.append((x2, y2))

            if id_to in peilgebieden_dict:
                x3 = peilgebieden_dict[id_to]['point_x']
                y3 = peilgebieden_dict[id_to]['point_y']
            else:
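                # downstream area unknown: draw the relation to a dummy point
                # offset by 10 map units from the origin centroid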
                x3 = x1 + 10
                y3 = y1 + 10
            coords.append((x3, y3))

            output_relations[item_id] = {"Relation_id": item_id, "From": id_from, "To": id_to,
                                         "Structure": relation, "Source": data_source, "coords": coords}

        #put new data in output_table
        insert_count = draw_lines_from_dict(gp, output_relations, output_feature)
        log.info(" - %s records has been inserted" % insert_count)

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 16
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 5:
            # input parameter
            input_external_weir = sys.argv[1]
            input_voronoi_polygon = sys.argv[2]
            input_rrcf_waterlevel = sys.argv[3]
            # output parameters
            output_table_external_weir = sys.argv[4]
        else:
            log.error("usage: <input_external_weir> <input_voronoi_polygon> <input rrcf waterlevel> <output_table_external_weir>")
            sys.exit(1)

        temp_voronoi = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(input_voronoi_polygon, temp_voronoi)
        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        log.debug(" - check input_external_weir: %s" % input_external_weir)
        if gp.describe(input_external_weir).ShapeType != 'Point':
            log.error("Input_external_weir is not a point feature class!")
            geometry_check_list.append(input_external_weir + " -> (Point)")

        log.debug(" - check voronoi polygon: %s" % temp_voronoi)
        if gp.describe(temp_voronoi).ShapeType != 'Polygon':
            log.error("Input voronoi is not a polygon feature class!")
            geometry_check_list.append(temp_voronoi + " -> (Polygon)")

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check required fields in database
        log.info("Check required fields in input data")

        missing_fields = []
        if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, config.get('toetsing_overstorten', 'calculation_point_ident')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'calculation_point_ident'), temp_voronoi))
            missing_fields.append("%s: %s" % (temp_voronoi, config.get('toetsing_overstorten', 'calculation_point_ident')))

        if not turtlebase.arcgis.is_fieldname(gp, input_rrcf_waterlevel, config.get('toetsing_overstorten', 'field_waterstand')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'field_waterstand'), input_rrcf_waterlevel))
            missing_fields.append("%s: %s" % (input_rrcf_waterlevel, config.get('toetsing_overstorten', 'field_waterstand')))

        if not turtlebase.arcgis.is_fieldname(gp, input_external_weir, config.get('toetsing_overstorten', 'overstort_ident')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'overstort_ident'), input_external_weir))
            missing_fields.append("%s: %s" % (input_external_weir, config.get('toetsing_overstorten', 'overstort_ident')))

        if not turtlebase.arcgis.is_fieldname(gp, input_external_weir, config.get('toetsing_overstorten', 'drempelhoogte')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'drempelhoogte'), input_external_weir))
            missing_fields.append("%s: %s" % (input_external_weir, config.get('toetsing_overstorten', 'drempelhoogte')))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # read waterlevel table as a dictionary
        log.info("Read waterlevel table")
        waterlevel_dict = nens.gp.get_table(gp, input_rrcf_waterlevel, primary_key=config.get('toetsing_overstorten', 'calculation_point_ident').lower())
        log.debug(waterlevel_dict)

        # Add fields to output
        if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, config.get('toetsing_overstorten', 'field_waterstand')):
            log.info(" - add field %s" % config.get('toetsing_overstorten', 'field_waterstand'))
            gp.addfield(temp_voronoi, "%s" % config.get('toetsing_overstorten', 'field_waterstand'), "double")

        
        # copy waterlevel to voronoi polygons
        field_config_waterstand = config.get('toetsing_overstorten', 'field_waterstand').lower()
        field_calculation_point_ident = config.get('toetsing_overstorten', 'calculation_point_ident')
        
        rows = gp.UpdateCursor(temp_voronoi)
        for row in nens.gp.gp_iterator(rows):
            row_id = row.GetValue(field_calculation_point_ident)
            
            if waterlevel_dict.has_key(row_id):
                log.debug(waterlevel_dict[row_id])
                row.SetValue(field_config_waterstand, waterlevel_dict[row_id][field_config_waterstand])

                rows.UpdateRow(row)

        #----------------------------------------------------------------------------------------
        # Join external weirs to voronoi using spatial location (spatial join)
        log.info("join waterlevel to external weirs using a spatial location")
        temp_spatial_join = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        #gp.SpatialJoin_analysis(input_external_weir, temp_voronoi, temp_spatial_join, "JOIN_ONE_TO_ONE", "#", "#", "INTERSECTS")
        gp.Intersect_Analysis(input_external_weir + ';' + temp_voronoi, temp_spatial_join)
        
        external_weir_dict = nens.gp.get_table(gp, temp_spatial_join, primary_key=config.get('toetsing_overstorten', 'overstort_ident').lower())

        result_dict = {}
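        # Classify each external weir: 1 if the calculated water level exceeds
        # the weir crest (drempelhoogte), 0 if not, and 9 when either value is
        # missing (both are then reported as -999).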
        for k, v in external_weir_dict.items():
            waterlevel = v[config.get('toetsing_overstorten', 'field_waterstand').lower()]
            weir_height = v[config.get('toetsing_overstorten', 'drempelhoogte').lower()]
            if waterlevel is None or weir_height is None:
                waterlevel = -999
                weir_height = -999
                result_value = 9
            else:
                if float(waterlevel) > float(weir_height):
                    result_value = 1
                else:
                    result_value = 0

            result_dict[k] = {config.get('toetsing_overstorten', 'overstort_ident'): k,
                              config.get('toetsing_overstorten', 'field_waterstand'): waterlevel,
                              config.get('toetsing_overstorten', 'drempelhoogte'): weir_height,
                              config.get('toetsing_overstorten', 'field_toetsing_overlast_stedelijk'): result_value}
        #----------------------------------------------------------------------------------------
        # Create output table
        if not gp.exists(output_table_external_weir):
            log.info("Create new output table")
            temp_result_table = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.CreateTable_management(os.path.dirname(temp_result_table), os.path.basename(temp_result_table))
            copy_table = True
        else:
            temp_result_table = output_table_external_weir
            copy_table = False

        fields_to_add = [config.get('toetsing_overstorten', 'field_waterstand'),
                         config.get('toetsing_overstorten', 'drempelhoogte'),
                         config.get('toetsing_overstorten', 'field_toetsing_overlast_stedelijk')]

        if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, config.get('toetsing_overstorten', 'overstort_ident')):
            log.debug(" - add field %s to %s" % (config.get('toetsing_overstorten', 'overstort_ident'), temp_result_table))
            gp.addfield_management(temp_result_table, config.get('toetsing_overstorten', 'overstort_ident'), 'text')

        for field in fields_to_add:
            if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, field):
                log.debug(" - add field %s to %s" % (field, temp_result_table))
                gp.addfield_management(temp_result_table, field, 'double')

        #----------------------------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(temp_result_table, config.get('toetsing_overstorten', 'overstort_ident').lower(), result_dict)

        if copy_table:
            gp.TableToTable_conversion(temp_result_table, os.path.dirname(output_table_external_weir), os.path.basename(output_table_external_weir))

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 17
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        Inputs required for this tool:
        """
        if len(sys.argv) == 4:
            user_input = sys.argv[1]
            flip_field = sys.argv[2].lower()
            output_shape = sys.argv[3]
        else:
            log.warning("usage: <user_input> <flip_field> <output_shape>")
            sys.exit(1)

        tempfiles = []
        input_shape = turtlebase.arcgis.get_random_file_name(workspace, '.shp')
        gp.Select_analysis(user_input, input_shape)
        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)
        if not turtlebase.arcgis.is_file_of_type(gp, input_shape, 'Polyline'):
            log.error("%s is not a %s feature class!" % (input_shape, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (input_shape, 'Polyline'))

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check required fields in input data
        ovk_field = config.get('general', 'ovkident').lower()
        missing_fields = []
        check_fields = {input_shape: ['Sum_OPP_LA', 'Sum_OPP_ST',
                        ovk_field, 'from_x', 'from_y', 'to_x', 'to_y', flip_field]}

        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        #create output:
        fields_to_add = [(ovk_field, 'TEXT'),
                         ('incoming', 'SHORT'),
                         ('examined', 'SHORT'),
                         ('terminal', 'SHORT'),
                         ('som_sted', 'DOUBLE'),
                         ('som_land', 'DOUBLE'),
                         ('som_totaal', 'DOUBLE'),
                         ('bottleneck', 'SHORT'),
                         (flip_field, 'SHORT')]
        gp.select_analysis(input_shape, output_shape)

        new_feat = {}
        new_geometry = {}
        log.info("Reading geometry and flipping line direction")

        fieldnames_dict = nens.gp.get_table_def(gp, input_shape)
        log.debug(fieldnames_dict)
        desc = gp.describe(input_shape)
        count = 0
        rows = gp.SearchCursor(input_shape)
        row = rows.Next()
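        # Walk over all lines; only lines flagged with 1 in the flip field are
        # reversed, and their from/to coordinates are swapped in the output dictionaries.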
        while row:

            flip_boolean = row.getValue(flip_field)

            if flip_boolean == 1:
                count += 1
                #read features
                feat = row.getValue(desc.ShapeFieldName)
                ovkident = row.getValue(ovk_field)
                new_feat = flip_geometry(gp, feat, ovkident, new_feat)
                ##new_feat = feat

                #store geometry information in dictionary
                if ovkident not in new_geometry:
                    new_geometry[ovkident] = {}
                #store all information from the attribute table
                for column in fields_to_add:
                    column = column[0]

                    #columns with from and to for x and y need to be switched as well
                    if column == 'from_x':
                        lookup_column = 'to_x'
                    elif column == 'from_y':
                        lookup_column = 'to_y'
                    elif column == 'to_y':
                        lookup_column = 'from_y'
                    elif column == 'to_x':
                        lookup_column = 'from_x'
                    else:
                        # no switch needed
                        lookup_column = column

                    if column != 'opm':
                        if lookup_column in fieldnames_dict:
                            update_value = row.getValue(lookup_column)
                            try:
                                float_value = float(update_value)
                                new_geometry[ovkident][column] = float_value
                            except:
                                log.debug("not a float")
                                new_geometry[ovkident][column] = row.getValue(lookup_column)
                            log.debug(new_geometry[ovkident][column])
                #the flipped waterline is stored in the dictionary
                if column == 'opm':
                    new_geometry[ovkident][column] = "Lijn is omgedraaid"
                log.info("Storing waterline: " + str(ovkident))
            row = rows.Next()
        del row, rows
        #remove the lines that are going to be flipped

        removed_lines = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        #the temp file only has to be created if lines are actually flipped.
        gp.select_analysis(input_shape, removed_lines)

        #first remove lines that would end up duplicated in the result; they are
        #removed from a copy of the input file.
        row = gp.UpdateCursor(removed_lines)
        log.info("Removing duplicate rows")
        for item in nens.gp.gp_iterator(row):
            if item.getValue(flip_field) == 1:
                row.DeleteRow(item)

        temp_shape = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        tempfiles.append(temp_shape)

        #create the flipped lines in the temporary workspace (temp_shape)
        count = create_line_from_dict(gp, workspace_gdb, new_feat, fields_to_add, new_geometry, temp_shape)
        
        if count == 0:
            log.warning("No lines were flipped")
            log.warning("The user did not enter a 1 in column " + str(flip_field))
        else:
            tempfiles.append(removed_lines)

        #merge new lines with output
        gp.Merge_management(temp_shape + ";" + removed_lines, output_shape)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            turtlebase.arcgis.remove_tempfiles(gp, log, tempfiles)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
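
The flip tool above calls a helper, flip_geometry, that is not part of this listing. The sketch below is only an illustration of what such a helper could look like, assuming the classic arcgisscripting geometry model (a polyline feature exposing its vertices through GetPart()/Next() and point objects with x/y attributes); the single-part assumption, the point attributes and the plain-tuple storage are assumptions, and the real helper in this toolbox may differ.

def flip_geometry(gp, feat, ovkident, new_feat):
    # Sketch only: reverse the vertex order of a (single-part) polyline so its
    # digitised direction is flipped, and store the reversed vertex list under
    # the line identifier. The gp argument is kept for symmetry with the call
    # site; a real implementation could use it to build gp Point/Array objects.
    part = feat.GetPart(0)        # first part of the polyline (assumed single-part)
    points = []
    point = part.Next()
    while point:
        points.append((point.x, point.y))
        point = part.Next()
    points.reverse()              # reversed order = flipped line direction
    new_feat[ovkident] = points
    return new_feat
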
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            import tempfile
            workspace = tempfile.gettempdir()
        workspace_folder = turtlebase.arcgis.get_random_layer_name()
        workspace_shp = os.path.join(workspace, workspace_folder)
        os.makedirs(workspace_shp)

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 8:
            peilgebieden = get_layer_full_path(gp, sys.argv[1])
            rr_peilgebied = get_layer_full_path(gp, sys.argv[2])
            hoogtekaart = get_layer_full_path(gp, sys.argv[3])
            rr_maaiveld = get_layer_full_path(gp, sys.argv[4])
            """Optional arguments for NBW analysis
            """
            if sys.argv[5] != '#':
                landgebruik = get_layer_full_path(gp, sys.argv[5])
            else:
                landgebruik = '#'
        
            if sys.argv[6] != '#':
                conversietabel = get_layer_full_path(gp, sys.argv[6])
            else:
                conversietabel = '#'
            
            if sys.argv[7] != '#':
                rr_toetspunten = get_layer_full_path(gp, sys.argv[7])
            else:
                rr_toetspunten = '#'
        else:
            log.warning("usage: <peilgebieden> <rr_peilgebied> <hoogtekaart> <rr_maaiveld> {landgebruik} {conversietabel} {rr_toetspunten}")
            sys.exit(1)
            
        kaartbladen = os.path.join(os.path.dirname(sys.argv[0]), "kaartbladen", "kaartbladen.shp")
        gpgident = config.get('general', 'gpgident')
        mv_procent = config.get("maaiveldkarakteristiek", "mv_procent")
        lgn_code = config.get('maaiveldkarakteristiek', 'lgn_code')
        nbw_klasse = config.get('maaiveldkarakteristiek', 'nbw_klasse')
        
        if landgebruik != '#':
            if conversietabel == '#':
                log.error("When you use a landuse map, a conversion table is required!")
                sys.exit(2)
            if rr_toetspunten == '#':
                rr_toetspunten = os.path.join(workspace_gdb, "rr_toetspunten")
                log.warning("You did not specify an output table for the RR_TOETSPUNTEN")
                log.warning(" - output will be written to %s" % rr_toetspunten)
                gp.CreateTable_management(os.path.dirname(rr_toetspunten), os.path.basename(rr_toetspunten))
        
        #---------------------------------------------------------------------
        # Environments
        geometry_check_list = []
        if gp.describe(hoogtekaart).PixelType[0] not in ['F']:
            log.info(gp.describe(hoogtekaart).PixelType)
            log.error("Input AHN is an integer raster, for this script a float is required")
            geometry_check_list.append(hoogtekaart + " -> (Float)")
            
        if landgebruik != '#':
            if gp.describe(landgebruik).PixelType[0] in ['F']:
                log.info(gp.describe(landgebruik).PixelType)
                log.error("Input landgebruik is a float raster, for this script an integer is required")
                geometry_check_list.append(landgebruik + " -> (Integer)")
            
            cellsize_ahn = gp.describe(hoogtekaart).MeanCellHeight
            cellsize_landgebruik = gp.describe(landgebruik).MeanCellHeight
            if not cellsize_ahn == cellsize_landgebruik:
                log.error("The cellsize of input AHN2 is %s, the cellsize of landuse is %s. They should be the same" % (
                                                                                                                       cellsize_ahn,
                                                                                                                       cellsize_landgebruik))
                geometry_check_list.append("Change cellsize of %s" % landgebruik)
        
        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

            
        gp.MakeFeatureLayer_management(kaartbladen, "krtbldn_lyr")
        gp.MakeFeatureLayer_management(peilgebieden, "gpg_lyr")
        gp.SelectLayerByLocation_management("krtbldn_lyr","INTERSECT","gpg_lyr","#","NEW_SELECTION")
        kaartbladen_prj = turtlebase.arcgis.get_random_file_name(workspace_shp, '.shp')
        gp.Select_analysis("krtbldn_lyr", kaartbladen_prj)
        peilgebieden_shp = turtlebase.arcgis.get_random_file_name(workspace_shp, '.shp')
        gp.Select_analysis("gpg_lyr", peilgebieden_shp)

        streefpeilen = {}
        rows_gpg = gp.SearchCursor(rr_peilgebied)
        row_gpg = rows_gpg.next()
        while row_gpg:
            gpg_id = row_gpg.getValue('gpgident')
            streefpeil = row_gpg.getValue('zomerpeil')
            streefpeilen[gpg_id] = streefpeil
            row_gpg = rows_gpg.next()
            
        conversion = {}
        if conversietabel != '#':
            rows_conv = gp.SearchCursor(conversietabel)
            row_conv = rows_conv.next()
            while row_conv:
                lgn = row_conv.GetValue(lgn_code)
                nbw = row_conv.GetValue(nbw_klasse)
                conversion[lgn] = nbw
                row_conv = rows_conv.next()
                
        rows = gp.SearchCursor(peilgebieden)
        row = rows.next()
        mvcurve_dict = {}        
        maxpeil = float(config.get('maaiveldkarakteristiek', 'max_hoogte'))
        
        if landgebruik != '#':
            nbw_dict = {}
            nbw_stedelijk = int(config.get('maaiveldkarakteristiek', 'nbw_stedelijk'))
            stedelijk_procent = int(config.get('maaiveldkarakteristiek', 'stedelijk_procent'))
            nbw_hoogwaardig = int(config.get('maaiveldkarakteristiek', 'nbw_hoogwaardig'))
            hoogwaardig_procent = int(config.get('maaiveldkarakteristiek', 'hoogwaardig_procent'))
            nbw_akkerbouw = int(config.get('maaiveldkarakteristiek', 'nbw_akkerbouw'))
            akkerbouw_procent = int(config.get('maaiveldkarakteristiek', 'akkerbouw_procent'))
            nbw_grasland = int(config.get('maaiveldkarakteristiek', 'nbw_grasland'))
            grasland_procent = int(config.get('maaiveldkarakteristiek', 'grasland_procent'))
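        # Per peilgebied: clip it out, derive the surface-level curve (maaiveldcurve)
        # from the AHN, and store the height at each requested percentile as
        # MV_HGT_<percentile>, rounded up to two decimals.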

        while row:
            gpg_value = row.getValue(gpgident)
            log.info(" - processing area %s" %  gpg_value)
            gpg_lyr = turtlebase.arcgis.get_random_layer_name()
            gp.MakeFeatureLayer_management(peilgebieden_shp, gpg_lyr, "%s = '%s'" % ('"' + gpgident + '"', gpg_value))
            tmp_gpg = turtlebase.arcgis.get_random_file_name(workspace_shp, '.shp')
            gp.Select_analysis(gpg_lyr, tmp_gpg)
        
            streefpeil = float(streefpeilen[gpg_value])
            curve, curve_per_landuse = maaiveldcurve.main(tmp_gpg, kaartbladen_prj, landgebruik, hoogtekaart, streefpeil, maxpeil, conversion, workspace_shp)
            mvcurve_dict[gpg_value] = {gpgident: gpg_value}
            
            for i in mv_procent.split(', '):
                v = curve[0][1][int(i)]
                mvcurve_dict[gpg_value]["MV_HGT_%s" % i] = math.ceil(v*100)/100
            
            if landgebruik != '#':
                nbw_dict[gpg_value] = {gpgident: gpg_value}
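                # NBW defaults per land-use class: DFLT_I_* takes the curve height at the
                # configured percentile, DFLT_O_* the mean of the 10%-height and the target level (streefpeil).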
                
                if nbw_stedelijk in curve_per_landuse:
                    nbw_dict[gpg_value]['DFLT_I_ST'] = curve_per_landuse[nbw_stedelijk][1][stedelijk_procent]
                    nbw_dict[gpg_value]['DFLT_O_ST'] = (curve_per_landuse[nbw_stedelijk][1][10] + streefpeil) / 2
                else:
                    nbw_dict[gpg_value]['DFLT_I_ST'] = NODATA
                    nbw_dict[gpg_value]['DFLT_O_ST'] = NODATA
                    
                if nbw_hoogwaardig in curve_per_landuse:
                    nbw_dict[gpg_value]['DFLT_I_HL'] = curve_per_landuse[nbw_hoogwaardig][1][hoogwaardig_procent]
                    nbw_dict[gpg_value]['DFLT_O_HL'] = (curve_per_landuse[nbw_hoogwaardig][1][10] + streefpeil) / 2
                else:
                    nbw_dict[gpg_value]['DFLT_I_HL'] = NODATA
                    nbw_dict[gpg_value]['DFLT_O_HL'] = NODATA
                    
                if nbw_akkerbouw in curve_per_landuse:
                    nbw_dict[gpg_value]['DFLT_I_AK'] = curve_per_landuse[nbw_akkerbouw][1][akkerbouw_procent]
                    nbw_dict[gpg_value]['DFLT_O_AK'] = (curve_per_landuse[nbw_akkerbouw][1][10] + streefpeil) / 2
                else:
                    nbw_dict[gpg_value]['DFLT_I_AK'] = NODATA
                    nbw_dict[gpg_value]['DFLT_O_AK'] = NODATA
                    
                if nbw_grasland in curve_per_landuse:
                    nbw_dict[gpg_value]['DFLT_I_GR'] = curve_per_landuse[nbw_grasland][1][grasland_procent]
                    nbw_dict[gpg_value]['DFLT_O_GR'] = (curve_per_landuse[nbw_grasland][1][10] + streefpeil) / 2
                else:
                    nbw_dict[gpg_value]['DFLT_I_GR'] = NODATA
                    nbw_dict[gpg_value]['DFLT_O_GR'] = NODATA
                
            gp.delete(tmp_gpg)
            row = rows.next()
            
        if landgebruik != '#':
            tp_fields = ["GPGIDENT", "DFLT_I_ST", "DFLT_I_HL", "DFLT_I_AK", "DFLT_I_GR",
                         "DFLT_O_ST", "DFLT_O_HL", "DFLT_O_AK", "DFLT_O_GR",
                         "MTGMV_I_ST", "MTGMV_I_HL", "MTGMV_I_AK", "MTGMV_I_GR", 
                         "MTGMV_O_ST", "MTGMV_O_HL", "MTGMV_O_AK", "MTGMV_O_GR"]
            for tp_field in tp_fields:
                if not turtlebase.arcgis.is_fieldname(gp, rr_toetspunten, tp_field):
                    gp.addfield_management(rr_toetspunten, tp_field, "TEXT")
        #---------------------------------------------------------------------
        turtlebase.arcgis.write_result_to_output(rr_maaiveld, gpgident, mvcurve_dict)
        if landgebruik != '#':
            turtlebase.arcgis.write_result_to_output(rr_toetspunten, gpgident, nbw_dict)
        
        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.info("delete temporary folder: %s" % workspace_shp)
            shutil.rmtree(workspace_shp)
            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_shp)
       
        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()
        log.info("workspace: %s" % workspace)

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #------------------------------------------------
        log.info("Reading and checking input")
        rekenpunten = sys.argv[1]
        waterlijnen = sys.argv[2]
        peilgebieden = sys.argv[3] #optional
        output_bergingstakken = sys.argv[4]
        gpgident = config.get('General', 'gpgident')
        if turtlebase.arcgis.is_fieldname(gp, peilgebieden, gpgident):
            peilgebieden_list = nens.gp.get_table(gp, peilgebieden, primary_key=gpgident.lower())
        else:
            log.error("field %s is missing in %s", gpgident, peilgebieden)
            sys.exit(1)

        if not turtlebase.arcgis.is_fieldname(gp, rekenpunten, gpgident):
            log.error("field %s is missing in %s", gpgident, rekenpunten)
            sys.exit(1)

        log.info("Check whether the given file names are arcgis compatible")

        for argv in sys.argv[1:]:
            turtlebase.filenames.check_filename(argv)

        #read the x and y coordinates of the calculation points
        log.info("Reading calculation points")

        rekenpunten_x_y_coordinaten = bepalen_x_y_coordinaat(gp, rekenpunten, gpgident)
        log.info("Copying " + waterlijnen + " to the workspace")
        waterlijnen_lokaal = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        log.debug("Copy the waterlines to a local directory")
        gp.select_analysis(waterlijnen, waterlijnen_lokaal)
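        # Around every calculation point a "star" of candidate storage branches is
        # generated; crossings with the waterlines are used to filter the candidates,
        # and the end point farthest from the waterline determines the branch that is kept.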
        log.info("Calculate end points of potential storage branches around the calculation points")
        dict_stars = create_dict_stars_around_rekenpunten(peilgebieden_list, config, rekenpunten_x_y_coordinaten)

        joined_dictionaries = join_dictionaries(dict_stars, rekenpunten_x_y_coordinaten)
        star = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        log.info("Creating potential storage branches from each calculation point")

        createLineFromPoints(gp, joined_dictionaries, 'gpgident', star)
        intersect = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        log.info("Calculate crossings of potential storage branches with waterlines")
        #Buffer_analysis (in_features, out_feature_class, buffer_distance_or_field, line_side, line_end_type, dissolve_option, dissolve_field)
       
        gp.Intersect_analysis(star + ";" + waterlijnen_lokaal, intersect, "#", "#", "POINT")
        intersect_x_y_coordinaten = bepalen_x_y_coordinaat(gp, intersect, gpgident)

        remainingpoints_to_be_removed_from_star = remove_duplicate_values_from_dictionaries(rekenpunten_x_y_coordinaten, intersect_x_y_coordinaten)

        #compare the keys of remainingpoints_to_be_removed_from_star with the ids in star and remove those records

        log.info("Determine remaining end points of storage branches")
        remove_records_from_shapefile_based_on_keys_in_dict(gp, star, gpgident, remainingpoints_to_be_removed_from_star)

        star_punten = turtlebase.arcgis.get_random_file_name(workspace_gdb)

        #the coordinates are now read from the star_punten shape (lines)
        log.info("Calculate ideal storage branch")
        create_points_from_dict(gp, dict_stars, star_punten, gpgident)

        intersect2 = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Intersect_analysis(star_punten + ";" + star, intersect2, "#", "#", "POINT")
        log.info("Calculate distance from potential storage branches to the waterline")
        log.debug("First a buffer is created")

        buffer_star = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Buffer_analysis(rekenpunten, buffer_star, int(config.get('bergingstakken', 'length_of_breach')))
        snijpunt_waterlijn = turtlebase.arcgis.get_random_file_name(workspace_gdb)

        log.debug("Intersect the buffer with the waterlines; these points are used to calculate the distance to the waterline")
        gp.Intersect_analysis(buffer_star + ";" + waterlijnen_lokaal, snijpunt_waterlijn, "#", "#", "POINT")

        log.debug("Write the coordinates of the intersection points with the waterline to a dictionary")
        snijpunten_waterlijn_dict = bepalen_x_y_coordinaat_meerdere_punten(gp, snijpunt_waterlijn, gpgident)

        log.debug("Write the coordinates of the remaining star points to a dictionary")
        punten_star_dict = bepalen_x_y_coordinaat_meerdere_punten(gp, intersect2, gpgident)

        log.debug("There are two or more points on the waterline to which the star points have a distance")
        log.debug("Determine which combination gives the minimal distance between a star point and the waterline")
        #calculate the distance between snijpunten_waterlijn_dict and intersect2 using the distance calculator from the previous script
        minimaldistance_dict_star_points = calculate_distance_between_points(snijpunten_waterlijn_dict, punten_star_dict)
        log.info("Determine which point of the storage branch lies farthest from the waterline")
        list_with_ideal_points = bepaal_ideale_punt_bergingstak(minimaldistance_dict_star_points)
        out_data = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Copy_management (star, out_data)

        log.info("Select the storage branches that are perpendicular to the waterline")
        remove_records_from_shapefile_not_in_list(gp, star, gpgident, list_with_ideal_points)
        #link the lines to the RR_oppervlak table and copy the openwat_HA values
        log.debug("The gpgident is retrieved again from the unique peilgebied id")
        clean_up_star(gp, star, gpgident)
        #intersect star with itself; if anything remains, warn with the peilgebied id involved so
        #the user can adjust it manually there.
        log.info("Creating output shape for storage branches")
        log.info('%s  star' % star)
        log.info('%s  output_bergingstakken' % output_bergingstakken)
        gp.select_analysis(star, output_bergingstakken)

        log.info("Check whether any storage branches overlap")
        try:
            intersect3 = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Intersect_analysis(output_bergingstakken, intersect3, "#", "#", "POINT")
            #loop through the intersect output and report the GPGident when it is present in the attribute table
            row = gp.SearchCursor(intersect3)
            for item in nens.gp.gp_iterator(row):
                gpg_ident = item.getValue(gpgident)
                log.warning("In peilgebied " + str(gpg_ident) + " the storage branch overlaps another storage branch. Adjust this manually!")
        except (RuntimeError, TypeError, NameError):
            log.info('No overlap present')
            
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting command parameters")
        if len(sys.argv) == 3:
            input_oppervlak = sys.argv[1]
            input_gewassen = sys.argv[2]
        else:
            log.error("Usage: python rural_correctie_oppervlakken.py <RR_Oppervlak> <input_gewassen>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        log.info("Correcting parameters")
        #check fields
        opm_correc_field = 'OPM_CORREC'
        if not turtlebase.arcgis.is_fieldname(gp, input_oppervlak, opm_correc_field):
            gp.AddField(input_oppervlak, opm_correc_field, 'TEXT', '#', '#', 50)

        gpgident_field = config.get('GENERAL', 'gpgident')
        area_field = config.get('OppervlakteParameters', 'input_oppervlak_area')
        verhard_field = config.get('OppervlakteParameters', 'input_oppervlak_verhard')
        onvsted_field = config.get('OppervlakteParameters', 'input_oppervlak_onvsted')
        onvland_field = config.get('OppervlakteParameters', 'input_oppervlak_onvland')
        kassen_field = config.get('OppervlakteParameters', 'input_oppervlak_kassen')
        openwat_field = config.get('OppervlakteParameters', 'input_oppervlak_openwat')

        input_check_bound_lower = float(config.get('OppervlakteParameters', 'input_check_bound_lower'))
        input_check_bound_upper = float(config.get('OppervlakteParameters', 'input_check_bound_upper'))

        rr_oppervlak_dict = {}
        rows = gp.UpdateCursor(input_oppervlak)    
        row = rows.next()
        while row:
            ident = row.GetValue(gpgident_field)
            area = row.GetValue(area_field)
            if area is None:
                area = 0
            verhard = row.GetValue(verhard_field)
            if verhard is None:
                verhard = 0
            onvsted = row.GetValue(onvsted_field)
            if onvsted is None:
                onvsted = 0
            onvland = row.GetValue(onvland_field)
            if onvland is None:
                onvland = 0                            
            kassen = row.GetValue(kassen_field)
            if kassen is None:
                kassen = 0
            openwat = row.GetValue(openwat_field)
            if openwat is None:
                openwat = 0
            
            opm_correc = ""

            if openwat < float(config.get('OppervlakteParameters', 'input_check_min_openwater_ha')):
                openwat = float(config.get('OppervlakteParameters', 'input_check_min_openwater_ha'))

            delta = area - (verhard + onvsted + onvland + kassen + openwat)
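            # Absorb the difference in onvland first, then onvsted, then kassen and
            # finally verhard, keeping each corrected total above zero.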
            if delta > input_check_bound_upper or delta < input_check_bound_lower:
                if (onvland + delta) > 0:
                    onvland = onvland + delta
                    log.info("Oppervlak %s voor peilvak %s aangepast." % (onvland_field, ident))
                    opm_correc = "Oppervlak %s voor peilvak aangepast." % (onvland_field)
                elif (onvsted + onvland + delta) > 0:
                    onvsted = onvsted + onvland + delta
                    onvland = 0
                    log.info("Oppervlak %s en %s voor peilvak %s aangepast." % (onvland_field, onvsted_field, ident))
                    opm_correc = "Oppervlak %s en %s voor peilvak aangepast." % (onvland_field, onvsted_field)

                elif (kassen + onvsted + onvland + delta) > 0:
                    kassen = kassen + onvsted + onvland + delta
                    onvland = 0
                    onvsted = 0
                    log.info("Oppervlak %s, %s en %s voor peilvak %s aangepast." % (kassen_field, onvland_field, onvsted_field, ident))
                    opm_correc = "Oppervlak %s, %s en %s voor peilvak aangepast." % (kassen_field, onvland_field, onvsted_field)
                elif (verhard + kassen + onvsted + onvland + delta) > 0:
                    verhard = verhard + kassen + onvsted + onvland + delta
                    onvland = 0
                    onvsted = 0
                    kassen = 0
                    log.info("Oppervlak %s, %s, %s en %s voor peilvak %s aangepast." % (verhard_field, kassen_field, onvland_field, onvsted_field, ident))
                    opm_correc = "Oppervlak %s, %s, %s en %s voor peilvak aangepast." % (verhard_field, kassen_field, onvland_field, onvsted_field)
                else:
                    log.info("Oppervlakken voor peilvak %s niet gecorrigeerd." % ident)
            else:
                log.info("Oppervlak %s correct." % ident)

            #write output
            #in the worst case, we only fill in opm_correc. so we always update the row
            row.SetValue(area_field, area)
            row.SetValue(onvland_field, onvland)
            row.SetValue(verhard_field, verhard)
            row.SetValue(onvsted_field, onvsted)
            row.SetValue(kassen_field, kassen)
            row.SetValue(openwat_field, openwat)
            rr_oppervlak_dict[ident] = {"onverhard stedelijk": onvsted, "onverhard landelijk": onvland}

            if len(opm_correc) > 50:
                opm_correc = opm_correc[:50]

            row.SetValue(opm_correc_field, opm_correc)
            rows.UpdateRow(row)
            row = rows.next()
            
        del rows
        del row
                
        if input_gewassen != "#":
            crop_fields = [config.get('OppervlakteParameters', 'grass_area'),
                           config.get('OppervlakteParameters', 'corn_area'),
                           config.get('OppervlakteParameters', 'potatoes_area'),
                           config.get('OppervlakteParameters', 'sugarbeet_area'),
                           config.get('OppervlakteParameters', 'grain_area'),
                           config.get('OppervlakteParameters', 'miscellaneous_area'),            
                           config.get('OppervlakteParameters', 'greenhouse_area'),
                           config.get('OppervlakteParameters', 'orchard_area'),
                           config.get('OppervlakteParameters', 'bulbous_plants_area'),
                           config.get('OppervlakteParameters', 'foliage_forest_area'),
                           config.get('OppervlakteParameters', 'pine_forest_area'),
                           config.get('OppervlakteParameters', 'nature_area'),
                           config.get('OppervlakteParameters', 'fallow_area'),
                           config.get('OppervlakteParameters', 'vegetables_area'),
                           config.get('OppervlakteParameters', 'flowers_area')]
            
            nonarab_field = config.get('OppervlakteParameters', 'nonarable_land_area')
            
            rows = gp.UpdateCursor(input_gewassen)    
            row = rows.next()
            while row:
                ident = row.GetValue(gpgident_field)
                correct_onvsted_ha = float(rr_oppervlak_dict[ident]['onverhard stedelijk'])
                correct_onvland_ha = float(rr_oppervlak_dict[ident]['onverhard landelijk'])
                
                row.SetValue(nonarab_field, correct_onvsted_ha)
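                # Scale every crop area so their sum matches the corrected
                # rural unpaved area (onverhard landelijk) of this peilgebied.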

                total_crop_area = 0
                for crop_field in crop_fields:
                    total_crop_area += (float(row.GetValue(crop_field)))
                                        
                percentage = correct_onvland_ha / total_crop_area
                for crop_field in crop_fields:
                    original_ha = float(row.GetValue(crop_field))
                    new_ha = original_ha * percentage
                    row.SetValue(crop_field, new_ha)
                rows.UpdateRow(row)
                row = rows.next()
                
            del rows
            del row
                
        mainutils.log_footer()

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 21
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        output_dir = sys.argv[7]
        if not gp.exists(output_dir):
            os.makedirs(output_dir)

        log.info("output_dir: " + output_dir)

        #add extra logfile
        fileHandler2 = logging.FileHandler(output_dir + '\\rr_convert.log')
        logging.getLogger("nens").addHandler(fileHandler2)
        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        if len(sys.argv) == 8:
            peilgebieden_feature = sys.argv[1]
            rr_dataset = sys.argv[2]
            afvoerkunstwerken = sys.argv[4]
            settings = sys.argv[6]
        else:
            log.error("Usage: python rural_rr_conversie.py <peilgebieden_feature> <rr_dataset> <rr_afvoer> <afvoerkunstwerken> <settings>")
            sys.exit(1)

        rr_dataset = rr_dataset.replace("'", "")
        rr_dataset = rr_dataset.replace("\\", "/")
        sys.argv[2] = rr_dataset

        #default settings
        if settings == "#":
            location_script = os.path.dirname(sys.argv[0])
            settings = os.path.join(location_script, config.get('RR', 'rr_default_settings'))

        rr_config = mainutils.read_config(settings, os.path.basename(settings))

        if not rr_config.get("column.peilgebied", 'paved_runoff_coefficient'):
            log.warning("paved_runoff_coefficient not available in rr-settings, default will be used")
            rr_config.set("column.peilgebied", 'paved_runoff_coefficient', "-")
            rr_config.set("default.peilgebied", 'paved_runoff_coefficient', '0.2')

        #----------------------------------------------------------------------------------------
        #check input parameters
        log.info('Checking presence of input files')
        if not(gp.exists(peilgebieden_feature)):
            log.error("input peilgebieden_feature " + peilgebieden_feature + " does not exist!")
            sys.exit(5)

        #checking if feature class contains polygons
        log.info("Checking if feature contains polygons")
        if gp.describe(peilgebieden_feature).ShapeType != "Polygon":
            log.error(peilgebieden_feature + " does not contain polygons, please add a feature class with polygons")
            sys.exit(5)

        # add xy coordinates
        log.info(settings)
        xcoord = 'XCOORD'
        ycoord = 'YCOORD'
        if not turtlebase.arcgis.is_fieldname(gp, peilgebieden_feature, xcoord):
            gp.addfield(peilgebieden_feature, xcoord, "Double")
        if not turtlebase.arcgis.is_fieldname(gp, peilgebieden_feature, ycoord):
            gp.addfield(peilgebieden_feature, ycoord, "Double")
        add_xy_coords(gp, peilgebieden_feature, xcoord, ycoord)

        #checking if feature class contains points
        if afvoerkunstwerken != "#":
            log.info("Checking if feature contains points")
            log.debug("ShapeType afvoerkunstwerken = " + gp.describe(afvoerkunstwerken).ShapeType)
            if gp.describe(afvoerkunstwerken).ShapeType != "Point":
                log.error(afvoerkunstwerken + " does not contain points, please add a feature class with points")
                sys.exit(5)

        #copy settings to output directory
        shutil.copyfile(settings, output_dir + '\\RR_Settings.ini')

        drainage = config.get('RR', 'drainage')
        log.info("drainage type is " + drainage)

        output_sobek = output_dir + "\\sobek_input"
        if not gp.exists(output_sobek):
            os.makedirs(output_sobek)
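        # Hand the five spatial inputs (sys.argv[1:6]), the settings file, the Sobek
        # output folder, the drainage type and the model type "RR" over to trrrlib.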

        trrrlib.main({}, sys.argv[1:6] + [settings] + [output_sobek] + [drainage] + ["RR"])
        log.info("*********************************************************")
        log.info("RR conversion complete")
        log.info("*********************************************************")

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 22
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 5:
            hydrobase = sys.argv[1]
            input_kwelkaart = sys.argv[2]
            input_bodemkaart = sys.argv[3]
            output_waterbalance = sys.argv[4]
        else:
            log.error("usage: <hydrobase> <input_kwelkaart> <input_bodemkaart> <output_waterbalance>")
            sys.exit(1)

        peilgebieden_fc = os.path.join(hydrobase, 'RR_Features',
                                       config.get('waterbalans',
                                                  'peilgebieden_fc'))
        if not gp.exists(peilgebieden_fc):
                log.error("Features '%s' is not available in the hydrobase" % config.get('waterbalans', 'peilgebieden_fc'))
                sys.exit(1)

        rr_peilgebied = os.path.join(hydrobase,
                                     config.get('waterbalans',
                                                'rr_peilgebied'))
        if not gp.exists(rr_peilgebied):
                log.error("Table '%s' is not available in the hydrobase" % config.get('waterbalans', 'rr_peilgebied'))
                sys.exit(1)

        rr_oppervlak = os.path.join(hydrobase,
                                    config.get('waterbalans',
                                               'rr_oppervlak'))
        if not gp.exists(rr_oppervlak):
                log.error("Table '%s' is not available in the hydrobase" % config.get('waterbalans', 'rr_oppervlak'))
                sys.exit(1)

        if input_kwelkaart == '#':
            rr_kwelwegzijging = os.path.join(hydrobase,
                                         config.get('waterbalans',
                                                    'rr_kwelwegzijging'))
            if not gp.exists(rr_kwelwegzijging):
                log.error("No seepage data available")
                sys.exit(1)
        else:
            rr_kwelwegzijging = '#'

        if input_bodemkaart == '#':
            rr_grondsoort = os.path.join(hydrobase,
                                         config.get('waterbalans',
                                                    'rr_grondsoort'))
            if not gp.exists(rr_grondsoort):
                log.error("No soil data available")
                sys.exit(1)
        else:
            rr_grondsoort = '#'

        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data,
        #        append them to list if missing>
        #check_fields = {}
        gpgident = config.get("general", "gpgident").lower()
        gafident = config.get("waterbalance", "gafident").lower()
        gafnaam = config.get("waterbalance", "gafnaam").lower()

        check_fields = {peilgebieden_fc: [gpgident, gafident, gafnaam],
                         rr_peilgebied: [gpgident, "zomerpeil", "winterpeil"]}
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments
        log.info("Check numbers of fields in input data")
        errorcode = 0
        nr_gpg = turtlebase.arcgis.fc_records(gp, peilgebieden_fc)
        if nr_gpg == 0:
            log.error("%s fc is empty" % peilgebieden_fc)
            errorcode += 1

        nr_peilgebied = turtlebase.arcgis.fc_records(gp, rr_peilgebied)
        if not nr_peilgebied == nr_gpg:
            log.error("%s (%s records) does not contain the same amount of records as %s (%s)" % (rr_peilgebied, nr_peilgebied,
                                                                                                  peilgebieden_fc, nr_gpg))
            errorcode += 1

        nr_oppervlak = turtlebase.arcgis.fc_records(gp, rr_oppervlak)
        if not nr_oppervlak == nr_gpg:
            log.error("%s (%s records) does not contain the same amount of records as %s (%s)" % (rr_oppervlak, nr_oppervlak,
                                                                                                  peilgebieden_fc, nr_gpg))
            errorcode += 1

        if rr_grondsoort != '#':
            nr_grondsoort = turtlebase.arcgis.fc_records(gp, rr_grondsoort)
            if not nr_grondsoort == nr_gpg:
                log.error("%s (%s records) does not contain the same amount of records as %s (%s)" % (rr_grondsoort, nr_grondsoort,
                                                                                                      peilgebieden_fc, nr_gpg))
                errorcode += 1
        else:
            nr_grondsoort = 0
        if rr_kwelwegzijging != '#':
            nr_kwelwegzijging = turtlebase.arcgis.fc_records(gp, rr_kwelwegzijging)
            if not nr_kwelwegzijging == nr_gpg:
                log.error("%s (%s records) does not contain the same amount of records as %s (%s)" % (rr_kwelwegzijging, nr_kwelwegzijging,
                                                                                                      peilgebieden_fc, nr_gpg))
                errorcode += 1
        else:
            nr_kwelwegzijging = 0

        if errorcode > 0:
            log.error("%s errors found, see above" % errorcode)
            sys.exit(1)

        log.info("Join tables")
        log.info(" - read %s" % peilgebieden_fc)
        peilgebieden = nens.gp.get_table(gp, peilgebieden_fc, primary_key=gpgident, no_shape=True)
        log.info(" - join %s" % rr_peilgebied)
        nens.gp.join_on_primary_key(gp, peilgebieden, rr_peilgebied, gpgident)
        log.info(" - join %s" % rr_oppervlak)
        nens.gp.join_on_primary_key(gp, peilgebieden, rr_oppervlak, gpgident)
        if rr_grondsoort != '#':
            log.info(" - join %s" % rr_grondsoort)
            nens.gp.join_on_primary_key(gp, peilgebieden, rr_grondsoort, gpgident)
        if rr_kwelwegzijging != '#':
            log.info(" - join %s" % rr_kwelwegzijging)
            nens.gp.join_on_primary_key(gp, peilgebieden, rr_kwelwegzijging, gpgident)

        required_keys = ["verhard_ha", "onvsted_ha", "kassen_ha",
                         "openwat_ha", "gras_ha", "natuur_ha", "zomerpeil",
                         "winterpeil", "shape_area", "hectares"]

        #---------------------------------------------------------------------
        # Calculate Kwel/Wegzijging
        if input_kwelkaart == '#' == input_bodemkaart:
            pass
        else:
            workspace = config.get('GENERAL', 'location_temp')

            turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

            if not os.path.isdir(workspace):
                os.makedirs(workspace)
            workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
            if errorcode == 1:
                log.error("failed to create a file geodatabase in %s" % workspace)

            if input_kwelkaart != '#':
                # Check out Spatial Analyst extension license
                gp.CheckOutExtension("Spatial")

                kwel_table = os.path.join(workspace_gdb, 'kwel_zs_table')
                #poldershape = os.path.join(workspace_gdb, 'polders')
                #gp.Dissolve_management(peilgebieden_fc, poldershape, gafident)

                gp.ZonalStatisticsAsTable_sa(peilgebieden_fc, gafident, input_kwelkaart, kwel_table, "DATA")
                kwelwegzijging = nens.gp.get_table(gp, kwel_table, primary_key=gafident, no_shape=True)

                #log.info(kwelwegzijging)

            if input_bodemkaart != '#':
                temp_bodemsoort = os.path.join(workspace_gdb, "temp_bodem")
                gp.select_analysis(input_bodemkaart, temp_bodemsoort)
                temp_peilgebied = os.path.join(workspace_gdb, "temp_peilgebied")
                gp.select_analysis(peilgebieden_fc, temp_peilgebied)
                intersect_bodem = os.path.join(workspace_gdb, "intersect_bodem")
                gp.Intersect_analysis("%s;%s" % (temp_peilgebied, temp_bodemsoort), intersect_bodem)

                bodemsoorten_polders = sort_bodemsoorten(gp, intersect_bodem, gafident)

        """
        FILL IN THE SEEPAGE (KWEL) AND SOIL (BODEM) VALUES IN THE DICT!
        """
        #---------------------------------------------------------------------
        # Waterbalance
        polders = {}
        log.info("Extract data for waterbalance")
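        # Group the peilgebieden by polder (gafident); per peilgebied a tuple with its
        # shape_area, land-use areas in ha, hectares, soil type (grondsoort) and
        # seepage (kwelstroom) is stored.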
        for k, v in peilgebieden.items():
            for required_key in required_keys:
                if required_key not in v.keys():
                    log.error("Cannot find %s for gpgident: %s" % (required_key, k))
                    sys.exit(1)

            if 'grondsoort' not in v:
                grondsoort = 0
            else:
                grondsoort = v['grondsoort']

            if 'kwelstroom' not in v:
                kwelstroom = 0
            else:
                kwelstroom = v['kwelstroom']

            if v[gafident] in polders:

                polders[v[gafident]]["peilgebieden"].append((k, v["shape_area"], v["verhard_ha"], v["onvsted_ha"], v["kassen_ha"],
                                                             v["openwat_ha"], v["gras_ha"], v["natuur_ha"], v['hectares'], grondsoort, kwelstroom))
            else:
                polders[v[gafident]] = {"peilgebieden": [(k, v["shape_area"], v["verhard_ha"], v["onvsted_ha"], v["kassen_ha"],
                                                          v["openwat_ha"], v["gras_ha"], v["natuur_ha"], v['hectares'], grondsoort, kwelstroom)]}

        waterbalance = {}
        log.info("Calculate data for waterbalance")
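        # Per polder: take the largest peilgebied as main_gpg, use its summer and winter
        # level, and derive the land-use areas and seepage from all peilgebieden in the polder.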
        for polder, attributes in polders.items():
            main_gpg, sum_area = max_gpg(attributes['peilgebieden'])
            kwelstroom, verhard_ha, onvsted_ha, kassen_ha, openwat_ha, gras_ha, natuur_ha = calculate_averages(attributes['peilgebieden'], sum_area)
            if input_bodemkaart == '#':
                bod1, bod2, bod3 = calculate_soiltypes(attributes['peilgebieden'])
            else:
                bod1 = "Bodem 1"
                bod2 = "Bodem 2"
                bod3 = "Bodem 3"

            if input_kwelkaart != '#':
                if polder in kwelwegzijging:
                    kwelstroom = kwelwegzijging[polder]['mean']
                else:
                    kwelstroom = 0
                    log.warning("%s has no seepage data" % polder)

            if kwelstroom > 0:
                kwel = kwelstroom
                wegz = 0
            else:
                wegz = -1 * kwelstroom
                kwel = 0

            winterp = peilgebieden[main_gpg]['winterpeil']
            zomerp = peilgebieden[main_gpg]['zomerpeil']
            sum_ha = sum_area / 10000
            waterbalance[polder] = [("Code", polder, "TEXT"), ("Naam", polder, "TEXT"),
                                    ("Main_GPG", main_gpg, "TEXT"), ("Bodemh", -999, "DOUBLE"),
                                    ("Kwel", kwel, "DOUBLE"), ("Wegz", wegz, "DOUBLE"),
                                    ("Winterpeil", winterp, "DOUBLE"), ("Zomerpeil", zomerp, "DOUBLE"),
                                    ("Totaal_ha", sum_ha, "DOUBLE"), ("Verhard_ha", verhard_ha, "DOUBLE"),
                                    ("Onvsted_ha", onvsted_ha, "DOUBLE"), ("Kassen_ha", kassen_ha, "DOUBLE"),
                                    ("Openwat_ha", openwat_ha, "DOUBLE"), ("Gras_ha", gras_ha, "DOUBLE"),
                                    ("Natuur_ha", natuur_ha, "DOUBLE"), ("Bodem1", bod1, "TEXT"),
                                    ("Bodem2", bod2, "TEXT"), ("Bodem3", bod3, "TEXT")]

        log.info("Write output table")
        gp.CreateTable(os.path.dirname(output_waterbalance), os.path.basename(output_waterbalance))
        for key, values in waterbalance.items():
            for attribute in values:
                log.info(" - add field %s" % attribute[0])
                gp.AddField(output_waterbalance, attribute[0], attribute[2])
            break

        log.info("Inserting new records")
        update_count = 0
        insertCursor = gp.InsertCursor(output_waterbalance)
        for key, values in waterbalance.items():
            newRow = insertCursor.NewRow()
            for attribute in values:
                newRow.SetValue(attribute[0], attribute[1])
            insertCursor.InsertRow(newRow)
            update_count += 1

        log.info(" - %s records have been inserted" % update_count)
        log.info("Finished")

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
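
The waterbalance example relies on helpers such as max_gpg and calculate_averages that are defined elsewhere in the script. Judging only from the call site, max_gpg seems to return the identifier of the largest peilgebied (by shape_area) together with the total area of the polder; the sketch below is written under that assumption and mirrors the tuple layout built in the loop above, so treat it as illustrative rather than the actual implementation.

def max_gpg(peilgebieden):
    # Sketch only (assumed behaviour). Each entry is a tuple:
    # (gpgident, shape_area, verhard_ha, onvsted_ha, kassen_ha,
    #  openwat_ha, gras_ha, natuur_ha, hectares, grondsoort, kwelstroom)
    sum_area = sum(p[1] for p in peilgebieden)           # total polder area (m2)
    main_gpg = max(peilgebieden, key=lambda p: p[1])[0]  # largest peilgebied id
    return main_gpg, sum_area
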
Ejemplo n.º 23
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)
        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()
            log.info("location temp: %s" % workspace)

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #get argv
        log.info("Getting command parameters... ")
        if len(sys.argv) == 7:
            input_peilgebieden_feature = sys.argv[1] #from HydroBase
            input_lgn = sys.argv[2]
            input_conversiontable_dbf = sys.argv[3]
            input_watershape = sys.argv[4]
            output_table = sys.argv[5] #RR_oppervlak in HydroBase
            output_crop_table = sys.argv[6]
        else:
            log.error("Arguments: <LGN raster> <peilgebied HydroBase-table> <conversiontable dbf> <output HydroBase-table>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        lgn_desc = gp.describe(input_lgn)
        if lgn_desc.DataType == 'RasterDataset' or lgn_desc.DataType == 'RasterLayer':
            if lgn_desc.PixelType[0] not in ["S", "U"]:
                errormsg = "input %s is not an integer raster!" % input_lgn
                log.error(errormsg)
                geometry_check_list.append(errormsg)
            else:
                # Create a polygon feature class from the input raster
                log.info("Input LGN is a raster, convert to feature class")
                temp_lgn_fc = turtlebase.arcgis.get_random_file_name(workspace_gdb)
                gp.RasterToPolygon_conversion(input_lgn, temp_lgn_fc, "NO_SIMPLIFY")
        elif lgn_desc.DataType == 'ShapeFile' or lgn_desc.DataType == 'FeatureClass':
            if lgn_desc.ShapeType != 'Polygon':
                errormsg = "input %s is not an integer raster!" % input_lgn
                log.error(errormsg)
                geometry_check_list.append(errormsg)
            else:
                # Copy shapefile to workspace
                log.info("Input LGN is a feature class, copy to workspace")
                temp_lgn_fc = turtlebase.arcgis.get_random_file_name(workspace_gdb)
                gp.Select_analysis(input_lgn, temp_lgn_fc)
        else:
            log.error("datatype of LGN is %s , must be a ShapeFile, FeatureClass, RasterDataset or RasterLayer" % lgn_desc.DataType)
            sys.exit(5)

        if not(gp.exists(input_peilgebieden_feature)):
            errormsg = "input %s does not exist!" % input_peilgebieden_feature
            log.error(errormsg)
            geometry_check_list.append(errormsg)

        if not(gp.exists(input_conversiontable_dbf)):
            errormsg = "input %s does not exist!" % input_conversiontable_dbf
            log.error(errormsg)
            geometry_check_list.append(errormsg)

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        "<check required fields from input data, append them to list if missing>"
        gpgident = config.get('GENERAL', 'gpgident')        
        if not turtlebase.arcgis.is_fieldname(gp, input_peilgebieden_feature, gpgident):
            log.debug(" - missing: %s in %s" % (gpgident, input_peilgebieden_feature))
            missing_fields.append("%s: %s" % (input_peilgebieden_feature, gpgident))

        hectares = config.get('OppervlakteParameters', 'input_oppervlak_area')
        verhard_ha = config.get('OppervlakteParameters', 'input_oppervlak_verhard')
        onvsted_ha = config.get('OppervlakteParameters', 'input_oppervlak_onvsted')
        kassen_ha = config.get('OppervlakteParameters', 'input_oppervlak_kassen')
        onvland_ha = config.get('OppervlakteParameters', 'input_oppervlak_onvland')
        openwat_ha = config.get('OppervlakteParameters', 'input_oppervlak_openwat')
        lgn_id = config.get('OppervlakteParameters', 'input_field_lgncode')
        conversion_fields = [lgn_id, verhard_ha, onvsted_ha, kassen_ha, onvland_ha, openwat_ha, hectares]
        for conversion_field in conversion_fields:
            if not turtlebase.arcgis.is_fieldname(gp, input_conversiontable_dbf, conversion_field):
                log.debug(" - missing: %s in %s" % (conversion_field, input_conversiontable_dbf))
                missing_fields.append("%s: %s" % (input_conversiontable_dbf, conversion_field))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # 2a) copy input targetlevel areas to workspace
        log.info("A) Create feature class input_peilgebieden_feature -> tempfile_peilgebied")
        peilgebieden_temp = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(input_peilgebieden_feature, peilgebieden_temp)

        # 2b) union(lgn + peilgebieden)
        log.info("B) Union lgn_shape + tempfile_peilgebied -> lgn_peilgebieden")
        intersect_temp = os.path.join(workspace_gdb, 'intersect_lgn_gpg')
        gp.Union_analysis("%s;%s" % (temp_lgn_fc, peilgebieden_temp), intersect_temp)

        # 3a) Read conversiontable into memory"
        log.info("C-1) Read conversiontable into memory")
        conversion = nens.gp.get_table(gp, input_conversiontable_dbf, primary_key=lgn_id.lower())

        # 3b) calculate areas for lgn_id
        log.info("C-2) Calculate areas for tempfile_LGN_peilgebied using conversiontable")
        #read gpgident from file
        lgn_fieldnames = nens.gp.get_table_def(gp, temp_lgn_fc)
        if "gridcode" in lgn_fieldnames:
            gridcode = "GRIDCODE"
        elif "grid_code" in lgn_fieldnames:
            gridcode = "grid_code"
        else:
            log.error("Cannot find 'grid_code' or 'gridcode' field in input lgn file")
            sys.exit(2)

        gewastypen = {1: config.get('OppervlakteParameters', 'grass_area'),
                      2: config.get('OppervlakteParameters', 'corn_area'),
                      3: config.get('OppervlakteParameters', 'potatoes_area'),
                      4: config.get('OppervlakteParameters', 'sugarbeet_area'),
                      5: config.get('OppervlakteParameters', 'grain_area'),
                      6: config.get('OppervlakteParameters', 'miscellaneous_area'),
                      7: config.get('OppervlakteParameters', 'nonarable_land_area'),
                      8: config.get('OppervlakteParameters', 'greenhouse_area'),
                      9: config.get('OppervlakteParameters', 'orchard_area'),
                      10: config.get('OppervlakteParameters', 'bulbous_plants_area'),
                      11: config.get('OppervlakteParameters', 'foliage_forest_area'),
                      12: config.get('OppervlakteParameters', 'pine_forest_area'),
                      13: config.get('OppervlakteParameters', 'nature_area'),
                      14: config.get('OppervlakteParameters', 'fallow_area'),
                      15: config.get('OppervlakteParameters', 'vegetables_area'),
                      16: config.get('OppervlakteParameters', 'flowers_area'),
                      }
        output_with_area = {}
        output_gewas_areas = {}
        unknown_lgn_codes = {}
        source_str = "lgn:" + os.path.basename(input_lgn) + " pg:" + os.path.basename(input_peilgebieden_feature)
        if len(source_str) > 50:
            source_str = source_str[:50]
        date_str = time.strftime('%x')

        calc_count = 0
        rows = gp.UpdateCursor(intersect_temp)
        for row in nens.gp.gp_iterator(rows):
            value_gpgident = row.GetValue(gpgident)
            if value_gpgident == "":
                continue
            value_gridcode = row.GetValue(gridcode)
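            # gridcode 0: area without an LGN class; only add it to the total hectares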
            if value_gridcode == 0:
                if value_gpgident in output_with_area:
                    output_with_area[value_gpgident][hectares] += float(row.shape.Area) / 10000
                else:
                    output_with_area[value_gpgident] = {gpgident : value_gpgident, hectares : float(row.shape.Area) / 10000}
                continue
                    
            value_lgn_id = int(value_gridcode)
            value_peilgeb_area = float(row.shape.Area) / 10000 #Area is in m2
            
            if 'gewastype' in conversion[value_lgn_id]:
                gewastype = conversion[value_lgn_id]['gewastype']
            else:
                gewastype = 1
            #add to area
            if value_gpgident in output_with_area:
                add_to_area, gewastype_ha, error = conv_ha(conversion, value_lgn_id, float(value_peilgeb_area), gewastype)
                for key in add_to_area.keys(): #all relevant keys
                    if key in output_with_area[value_gpgident]:
                        output_with_area[value_gpgident][key] += float(add_to_area[key])
                    else:
                        output_with_area[value_gpgident][key] = float(add_to_area[key])
            else:
                output_with_area[value_gpgident], gewastype_ha, error = conv_ha(conversion, value_lgn_id, float(value_peilgeb_area), gewastype)
                output_with_area[value_gpgident][gpgident] = value_gpgident #set GPGIDENT
                if error and not(value_lgn_id in unknown_lgn_codes):
                    log.warning(" - Warning: lgncode " + str(value_lgn_id) + " not known (check conversiontable)")
                    unknown_lgn_codes[value_lgn_id] = 1
            
            if gewastype != 0:
                if value_gpgident not in output_gewas_areas:
                    output_gewas_areas[value_gpgident] = {gpgident: value_gpgident}
                    for key in gewastypen.keys():
                        output_gewas_areas[value_gpgident][gewastypen[key]] = 0
                    
                output_gewas_areas[value_gpgident][gewastypen[gewastype]] += gewastype_ha
                
            output_with_area[value_gpgident]['LGN_SOURCE'] = source_str
            output_with_area[value_gpgident]['LGN_DATE'] = date_str
            calc_count = calc_count + 1
            if calc_count % 100 == 0:
                log.info("Calculating field nr " + str(calc_count))
        #----------------------------------------------------------------------------------------
        if input_watershape != "#":
            log.info("C-3) Calculate open water from watershape")

            # 1) intersect(watershape+peilgebieden)
            log.info("- intersect water_shape + tempfile_peilgebied -> watershape_peilgebieden")
            watershape_intersect = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Intersect_analysis("%s;%s" % (input_watershape, peilgebieden_temp), watershape_intersect)

            source_watershape = os.path.basename(input_watershape)
            if len(source_watershape) > 50:
                source_watershape = source_watershape[:50]

            watershape_areas = {}
            rows = gp.SearchCursor(watershape_intersect)
            for row in nens.gp.gp_iterator(rows):
                water_area_ha = float(row.shape.Area) / 10000 #Area is in m2
                peilgebied_id = row.GetValue(gpgident)
                if peilgebied_id in watershape_areas:
                    subtotal_area = watershape_areas[peilgebied_id]['area']
                    #overwrite key with sum areas
                    watershape_areas[peilgebied_id] = {'area': subtotal_area + water_area_ha}
                else:
                    #create new key with area
                    watershape_areas[peilgebied_id] = {'area': water_area_ha}
            #update outputtable
            for peilgebied_id in output_with_area.keys():
                if peilgebied_id in watershape_areas:
                    output_with_area[peilgebied_id]['OPNWT_GBKN'] = watershape_areas[peilgebied_id]['area']
                    output_with_area[peilgebied_id]['GBKN_DATE'] = date_str
                    output_with_area[peilgebied_id]['GBKN_SOURCE'] = source_watershape

        #----------------------------------------------------------------------------------------
        # 4) put dictionary area into output_table (HydroBase)
        log.info("D) Saving results... ")

        #definition of fields
        areaFields = {gpgident: {'type': 'TEXT', 'length': '30'},
                      'VERHRD_LGN':{'type': 'DOUBLE'},
                      'ONVSTD_LGN':{'type': 'DOUBLE'},
                      'KASSEN_LGN':{'type': 'DOUBLE'},
                      'ONVLND_LGN':{'type': 'DOUBLE'},
                      'OPENWT_LGN':{'type': 'DOUBLE'},
                      'HECTARES':{'type': 'DOUBLE'},
                      'OPNWT_GBKN':{'type': 'DOUBLE'},
                      'LGN_SOURCE':{'type': 'TEXT', 'length': '50'},
                      'LGN_DATE':{'type': 'TEXT', 'length': '50'},
                      'GBKN_DATE':{'type': 'TEXT', 'length': '50'},
                      'GBKN_SOURCE':{'type': 'TEXT', 'length': '50'}}

        #check if output_table exists. if not, create it and add the correct fields
        log.info("Checking table...")
        if not(gp.exists(output_table)):
            try:
                gp.CreateTable(os.path.dirname(output_table), os.path.basename(output_table))
            except Exception, e:
                log.error("Error: creating table " + output_table)
                log.debug(e)
                sys.exit(14)

        #check if output_table has the correct fields
        log.info("Checking fields...")
        for field_name, field_settings in areaFields.items():
            if 'length' in field_settings:
                if not turtlebase.arcgis.is_fieldname(gp, output_table, field_name):
                    gp.AddField(output_table, field_name, field_settings['type'], '#', '#', field_settings['length'])
            else:
                if not turtlebase.arcgis.is_fieldname(gp, output_table, field_name):
                    gp.AddField(output_table, field_name, field_settings['type'])

        #----------------------------------------------------------------------------------------
        #log.info(output_with_area)
        turtlebase.arcgis.write_result_to_output(output_table, gpgident.lower(), output_with_area)

        #----------------------------------------------------------------------------------------
        # 5) Calculate crop areas
        if output_crop_table != "#":
            
            
            log.info("E) Calculate crop areas... ")
            
            #definition of fields
            cropFields = {gpgident: {'type': 'TEXT', 'length': '30'},
                          'GRAS_HA':{'type': 'DOUBLE'},
                          'MAIS_HA':{'type': 'DOUBLE'},
                          'AARDAPL_HA':{'type': 'DOUBLE'},
                          'BIET_HA':{'type': 'DOUBLE'},
                          'GRAAN_HA':{'type': 'DOUBLE'},
                          'OVERIG_HA':{'type': 'DOUBLE'},
                          'NIETAGR_HA':{'type': 'DOUBLE'},
                          'GLAST_HA':{'type': 'DOUBLE'},
                          'BOOMGRD_HA':{'type': 'DOUBLE'},
                          'BOLLEN_HA':{'type': 'DOUBLE'},
                          'LOOFBOS_HA':{'type': 'DOUBLE'},
                          'NLDBOS_HA':{'type': 'DOUBLE'},
                          'NATUUR_HA':{'type': 'DOUBLE'},
                          'BRAAK_HA':{'type': 'DOUBLE'},
                          'GROENTN_HA':{'type': 'DOUBLE'},
                          'BLOEMEN_HA':{'type': 'DOUBLE'}}

            #check if output_crop_table exists. if not, create it and add the correct fields
            log.info("Checking table...")
            if not(gp.exists(output_crop_table)):
                try:
                    gp.CreateTable(os.path.dirname(output_crop_table), os.path.basename(output_crop_table))
                except Exception, e:
                    log.error("Error: creating table " + output_crop_table)
                    log.debug(e)
                    sys.exit(14)
            
            #check if output_crop_table has the correct fields
            log.info("Checking fields...")
            for field_name, field_settings in cropFields.items():
                if 'length' in field_settings:
                    if not turtlebase.arcgis.is_fieldname(gp, output_crop_table, field_name):
                        gp.AddField(output_crop_table, field_name, field_settings['type'], '#', '#', field_settings['length'])
                else:
                    if not turtlebase.arcgis.is_fieldname(gp, output_crop_table, field_name):
                        gp.AddField(output_crop_table, field_name, field_settings['type'])
                        
            write_result_to_output(gp, output_crop_table, gpgident.lower(), output_gewas_areas)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
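# conv_ha() itself is not included in this listing. A minimal sketch of what such a
# helper could look like, assuming the conversion table holds one fraction column per
# surface class for every LGN code; the column names and the 'hectares' key below are
# placeholders (in the tool they come from turtle-settings.ini), and conv_ha_sketch is
# a hypothetical name, not the original function.
def conv_ha_sketch(conversion, lgn_id, area_ha, gewastype,
                   fraction_columns=('verhard_ha', 'onvsted_ha', 'kassen_ha',
                                     'onvland_ha', 'openwat_ha')):
    """Return (areas per surface class, crop area in ha, error flag)."""
    add_to_area = {'hectares': area_ha}
    if lgn_id not in conversion:
        # unknown LGN code: only count the total area and flag the error
        return add_to_area, 0.0, True
    row = conversion[lgn_id]
    for column in fraction_columns:
        fraction = float(row.get(column, 0) or 0)
        add_to_area[column] = area_ha * fraction
    # crop area only counts when the code maps to a real crop type (gewastype != 0)
    gewastype_ha = area_ha if gewastype else 0.0
    return add_to_area, gewastype_ha, False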
Ejemplo n.º 24
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #read conversion settings
        modeltype = "RR+RR_CF"

        #---------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters")

        if len(sys.argv) == 8:
            peilgebieden_feature = sys.argv[1]
            rr_dataset = sys.argv[2]
            afvoerkunstwerken = sys.argv[4]
            koppelpunten = sys.argv[5]
            settings = sys.argv[6]
            output_dir = sys.argv[7]
        else:
            log.error("Usage: python rural_rr_rrcf_conversie.py \
            <peilgebieden_feature> <rr_dataset> <rr_afvoer> \
            <afvoerkunstwerken> <koppelpunten> <settings>")
            sys.exit(1)

        rr_dataset = rr_dataset.replace("\\", "/")
        rr_dataset = rr_dataset.replace("'", "")
        sys.argv[2] = rr_dataset
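        # sys.argv[1:6] is handed to trrrlib.main() further on, so keep the
        # cleaned dataset path in sys.argv as well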

        log.info("output_dir: " + output_dir)

        #add extra logfile
        fileHandler2 = logging.FileHandler(output_dir + '\\rr_rrcf_convert.log')
        logging.getLogger("nens.turtle").addHandler(fileHandler2)

        #---------------------------------------------------------------------
        #check input parameters
        log.info('Checking presence of input files')
        if not(gp.exists(peilgebieden_feature)):
            log.error("input_toetspunten " + peilgebieden_feature + " does not exist!")
            sys.exit(5)

        #----------------------------------------------------------------------------------------
        #default settings
        if settings == "#":
            location_script = os.path.dirname(sys.argv[0])
            settings = os.path.join(location_script, config.get('RR', 'rr_rrcf_default_settings'))

        rr_config = mainutils.read_config(settings, os.path.basename(settings))

        if not rr_config.get("column.peilgebied", 'paved_runoff_coefficient'):
            log.warning("paved_runoff_coefficient not available in rr+rrcf-settings, default will be used")
            rr_config.set("column.peilgebied", 'paved_runoff_coefficient', "-")
            rr_config.set("default.peilgebied", 'paved_runoff_coefficient', '0.2')

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')

        configfilename = os.path.join(workspace, "rr_rrcf_settings_temp.ini")
        configfile = open(configfilename, "wb")
        rr_config.write(configfile)
        configfile.close()
        settings = configfilename
        #----------------------------------------------------------------------------------------
        #checking if feature class contains polygons
        log.info("Checking if feature contains polygons")
        pg_obj = gp.describe(peilgebieden_feature)
        if pg_obj.ShapeType != "Polygon":
            log.error(peilgebieden_feature + " does not contain polygons, please add a feature class with polygons")
            log.error(" - gp message: " + gp.GetMessages(2))
            sys.exit(5)

        # If rr_afvoer is empty, ignore this table, because trrrlib will crash
        if sys.argv[3] != '#':
            if turtlebase.arcgis.fc_is_empty(gp, sys.argv[3]):
                log.warning("rr_afvoer is empty, this table will be ignored")
                sys.argv[3] = '#'

        # add xy coordinates
        xcoord = 'XCOORD'
        ycoord = 'YCOORD'
        if not turtlebase.arcgis.is_fieldname(gp, peilgebieden_feature, xcoord):
            gp.addfield(peilgebieden_feature, xcoord, "Double")
        if not turtlebase.arcgis.is_fieldname(gp, peilgebieden_feature, ycoord):
            gp.addfield(peilgebieden_feature, ycoord, "Double")
        add_xy_coords(gp, peilgebieden_feature, xcoord, ycoord)

        #checking if feature class contains points
        if afvoerkunstwerken != "#":
            log.info("Checking if feature contains points")
            ak_obj = gp.describe(afvoerkunstwerken)
            log.debug("ShapeType afvoerkunstwerken = " + ak_obj.ShapeType)
            if ak_obj.ShapeType != "Point":
                log.error(afvoerkunstwerken + " does not contain points, please add a feature class with points")
                log.error(" - gp message: " + gp.GetMessages(2))
                sys.exit(5)

        #check points
        if koppelpunten != "#":
            log.info("Checking if feature contains points")
            kp_obj = gp.describe(koppelpunten)
            log.debug("ShapeType koppelpunten = " + kp_obj.ShapeType)
            if kp_obj.ShapeType != "Point":
                log.error(koppelpunten + " does not contain points, please add a feature class with points")
                log.debug(gp.GetMessages(2))
                sys.exit(5)

        #copy settings to output directory
        shutil.copyfile(settings, output_dir + '\\RR_RRCF_Settings.ini')

        drainage = config.get('RR', 'drainage')
        log.info("drainage type is " + drainage)

        #export rrcf connection to output folder. Convert feature class to shape
        output_shapefiles = output_dir + '\\shapefiles'
        if not os.path.isdir(output_shapefiles):
            os.makedirs(output_shapefiles)
        log.debug("export rrcf connection nodes to" + output_shapefiles)

        gp.Select_analysis(koppelpunten, output_shapefiles + "\\rrcf_connection.shp")
        log.debug("features exported")

        output_sobek = output_dir + "\\sobek_input"
        if not os.path.isdir(output_sobek):
            os.makedirs(output_sobek)

        log.debug("from trrrlib import trrrlib")
        trrrlib.main({}, sys.argv[1:6] + [settings] + [output_sobek] + [drainage] + [modeltype])
        if os.path.isfile(output_sobek + "/struct.def"):
            os.remove(output_sobek + "/struct.def")
        if os.path.isfile(output_sobek + "/profile.dat"):
            os.remove(output_sobek + "/profile.dat")
        log.info("*********************************************************")
        log.info(modeltype + " Conversie compleet")
        log.info("*********************************************************")

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 25
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()
        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 11:
            log.info("Reading input parameters")
            peilgebied = sys.argv[1]
            input_rr_peilgebied = sys.argv[2]
            input_rr_maaiveld = sys.argv[3]
            input_ahn = sys.argv[4]
            input_lgn = sys.argv[5]
            conversion = sys.argv[6]
            input_hymstat = sys.argv[7]
            output_risk_table = sys.argv[8]
            output_risico = sys.argv[9]
            output_risico_inundation = sys.argv[10]
        else:
            log.error("usage: <peilgebied> <input_rr_peilgebied> <input_rr_maaiveld> <input_ahn> <input_lgn>\
                      <conversion> <input_hymstat> <output_risk_table> <output_risico> <output_risico_inundation>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        log.info(" - read Conversion table")
        conv_list = [d for d in csv.DictReader(open(conversion))]

        expected_keys = ['LGN', 'K5', 'maxschade', 'sr1', 'sr2', 'sr3',
                         'sr4', 'sr5', 'sr6', 'sr7', 'sr8', 'sr9']
        for k in expected_keys:
            if k not in conv_list[0].keys():
                log.error('could not find key %s in conversion table' % k)
                sys.exit(2)

        schadefuncties = {}
        for item in conv_list:
            schadefuncties[int(item['LGN'])] = item
        #----------------------------------------------------------------------------------------
        log.info(" - read hymstat table")
        csv_list = [d for d in csv.DictReader(open(input_hymstat))]
        expected_hymstat_keys = ['Location', 'Scale par. beta', 'Location par. x0']

        for k in expected_hymstat_keys:
            if k not in csv_list[0].keys():
                log.error('could not find key %s in hymstat table' % k)
                sys.exit(2)

        hymstat = {}
        for item in csv_list:
            hymstat[item[config.get('risico', 'hymstat_id')]] = item
        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)

        "<check geometry from input data, append to list if incorrect>"

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data, append them to list if missing>
        check_fields = {}#check_fields = {input_1: [fieldname1, fieldname2], input_2: [fieldname1, fieldname2]}
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Environments
        log.info("Set environments")
        temp_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Select_analysis(peilgebied, temp_peilgebieden)

        cellsize = gp.describe(input_ahn).MeanCellHeight  # use same cell size as AHN
        gp.extent = gp.describe(temp_peilgebieden).extent  # use extent from Peilgebieden
        gpgident = config.get('GENERAL', 'gpgident')

        #----------------------------------------------------------------------------------------
        # create ahn ascii
        log.info("Create ascii from ahn")

        ahn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn, ahn_ascii)

        #----------------------------------------------------------------------------------------
        # create lgn ascii
        log.info("Create ascii from lgn")
        #read gpgident from file
        lgn_desc = gp.describe(input_lgn)
        if lgn_desc.DataType == 'ShapeFile' or lgn_desc.DataType == 'FeatureClass':
            lgn_fieldnames = nens.gp.get_table_def(gp, input_lgn)
            if "gridcode" in lgn_fieldnames:
                gridcode = "GRIDCODE"
            elif "grid_code" in lgn_fieldnames:
                gridcode = "grid_code"
            else:
                log.error("Cannot find 'grid_code' or 'gridcode' field in input lgn file")
                sys.exit(2)

            temp_lgn = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.FeatureToRaster_conversion(input_lgn, gridcode, temp_lgn, cellsize)
        elif lgn_desc.DataType == 'RasterDataset':
            temp_lgn = input_lgn
            if not lgn_desc.MeanCellHeight == cellsize:
                log.error("LGN cellsize does not match AHN cellsize (%sx%s m)" % cellsize)
                sys.exit(5)
        else:
            log.error("cannot recognize datatype of LGN, must be a fc, shapefile or a raster dataset")
            sys.exit(5)

        lgn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("lgn ascii: %s" % lgn_ascii)
        gp.RasterToASCII_conversion(temp_lgn, lgn_ascii)

        #----------------------------------------------------------------------------------------
        log.info("Create ascii from surface level areas")
        if not turtlebase.arcgis.is_fieldname(gp, temp_peilgebieden, "ID_INT"):
            gp.AddField(temp_peilgebieden, "ID_INT", "LONG")

        id_int = 1
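        # FeatureToRaster (below) needs an integer value field, so every peilgebied
        # gets a sequential ID_INT; the two dictionaries below map between ID_INT
        # and the GPGIDENT codes used in the RR tables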
        idint_to_peilvakid = {}
        peilvakid_to_idint = {}
        if turtlebase.arcgis.is_fieldname(gp, temp_peilgebieden, gpgident):
            rows = gp.SearchCursor(temp_peilgebieden)
            for row in nens.gp.gp_iterator(rows):
                peilvakid = row.GetValue(gpgident)
                idint_to_peilvakid[id_int] = peilvakid
                peilvakid_to_idint[peilvakid] = id_int
                id_int = id_int + 1 #each row gets a new id_int

        log.info(" - calc value ID_INT")
        rows = gp.UpdateCursor(temp_peilgebieden)
        for row in nens.gp.gp_iterator(rows):
            gpg_ident = row.GetValue(gpgident)
            id_int = peilvakid_to_idint[gpg_ident]
            row.SetValue("ID_INT", id_int)
            rows.UpdateRow(row)

        log.info("Conversion feature peilgebieden to raster")
        InField = "ID_INT"
        temp_peilgebieden_raster = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.FeatureToRaster_conversion(temp_peilgebieden, InField, temp_peilgebieden_raster, cellsize)

        peilgeb_asc = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        gp.RasterToASCII_conversion(temp_peilgebieden_raster, peilgeb_asc)

        #----------------------------------------------------------------------------------------
        # Read input tables into dictionaries
        log.info("Read input tables")
        log.info(" - read RR_Peilgebied")
        rr_peilgebied = nens.gp.get_table(gp, input_rr_peilgebied, primary_key=gpgident.lower())
        log.info(" - read RR_Maaiveld")
        rr_maaiveld = nens.gp.get_table(gp, input_rr_maaiveld, primary_key=gpgident.lower())

        log.info(" - read conversion table between id_int and gpgident")
        gpg_conv = nens.gp.get_table(gp, temp_peilgebieden, primary_key='id_int')

        #----------------------------------------------------------------------------------------
        log.info("Calculate Risk")
        temp_risico = turtlebase.arcgis.get_random_file_name(workspace, "risk.asc")
        temp_risico_in = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        risico_tbl = turtlebase.risico.create_risk_grid(ahn_ascii, lgn_ascii,
                                                        peilgeb_asc, rr_peilgebied, rr_maaiveld,
                                                        hymstat, gpg_conv, schadefuncties, temp_risico,
                                                        temp_risico_in, cellsize)

        risk_result = turtlebase.risico.create_risico_dict(risico_tbl, schadefuncties, primary_key=gpgident)
        for k in risk_result.keys():
            risk_result[k]['SOURCE'] = "hymstat: %s, ahn: %s, lgn: %s" % (os.path.basename(input_hymstat),
                                                         os.path.basename(input_ahn),
                                                         os.path.basename(input_lgn))
            risk_result[k]['DATE_TIME'] = time.strftime("%d-%m-%Y, %H:%M:%S")

        gp.ASCIIToRaster_conversion(temp_risico, output_risico, "FLOAT")
        gp.ASCIIToRaster_conversion(temp_risico_in, output_risico_inundation, "FLOAT")

        # Write the results to a new table
        if not(gp.exists(output_risk_table)):
            log.info("creating table " + output_risk_table)
            gp.CreateTable(os.path.dirname(output_risk_table), os.path.basename(output_risk_table))

        risk_fields = nens.gp.get_table_def(gp, output_risk_table)
        fields_to_add = [{'fieldname': gpgident, 'fieldtype': 'text', 'length': 50},
                         {'fieldname': 'RIS_GW', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_ST', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_HL', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_AK', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_GR', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_NT', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_ST', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_HL', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_AK', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_GR', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_NT', 'fieldtype': 'Double'},
                         {'fieldname': 'SOURCE', 'fieldtype': 'text', 'length': 256},
                         {'fieldname': 'DATE_TIME', 'fieldtype': 'text', 'length': 25},
                         {'fieldname': 'COMMENTS', 'fieldtype': 'text', 'length': 256}]

        for field_to_add in fields_to_add:
            if field_to_add['fieldname'].lower() not in risk_fields:
                if 'length' in field_to_add:
                    gp.addfield_management(output_risk_table, field_to_add['fieldname'], field_to_add['fieldtype'], "#", "#", field_to_add['length'])
                else:
                    gp.addfield_management(output_risk_table, field_to_add['fieldname'], field_to_add['fieldtype'])

        turtlebase.arcgis.write_result_to_output(output_risk_table, gpgident, risk_result)
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        tempfiles = os.listdir(workspace)
        for temp_file in tempfiles:
            if temp_file.endswith('.asc') or temp_file.endswith('.prj'):
                try:
                    os.remove(os.path.join(workspace, temp_file))
                    log.debug("%s/%s removed" % (workspace, temp_file))
                except Exception, e:
                    log.debug(e)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        nodig voor deze tool:
        """
        
        if len(sys.argv) == 8:
            peilvakken_input = sys.argv[1]
            watergangen_as_input = sys.argv[2]
            stuwen_input = sys.argv[3]
            gemalen_input = sys.argv[4]
            afstand_input = sys.argv[5]
            output_peilscheiding_vereist = sys.argv[6]
            output_kunstwerken_zonder_peilscheiding = sys.argv[7]
              
        else:
            log.warning("usage: <argument1> <argument2>")
            #sys.exit(1)

        for argv in sys.argv[1:5]:
            turtlebase.filenames.check_filename(argv)
        for argv in sys.argv[6:]:
            turtlebase.filenames.check_filename(argv)
            
        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)
        if not turtlebase.arcgis.is_file_of_type(gp, peilvakken_input, 'Polygon'):
            log.error("%s is not a %s feature class!" % (peilvakken_input, 'Polygon'))
            geometry_check_list.append("%s -> (%s)" % (peilvakken_input, 'Polygon'))

        if not turtlebase.arcgis.is_file_of_type(gp, watergangen_as_input, 'Polyline'):
            log.error("%s is not a %s feature class!" % (watergangen_as_input, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (watergangen_as_input, 'Polyline'))

        if not turtlebase.arcgis.is_file_of_type(gp, stuwen_input, 'Point'):
            log.error("%s is not a %s feature class!" % (stuwen_input, 'Point'))
            geometry_check_list.append("%s -> (%s)" % (stuwen_input, 'Point'))

        if not turtlebase.arcgis.is_file_of_type(gp, gemalen_input, 'Point'):
            log.error("%s is not a %s feature class!" % (gemalen_input, 'Point'))
            geometry_check_list.append("%s -> (%s)" % (gemalen_input, 'Point'))


        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        kstident_fieldname = config.get('GENERAL', 'kstident')
        kgmident_fieldname = config.get('GENERAL', 'kgmident')
        
        check_fields = {stuwen_input: [kstident_fieldname],
                         gemalen_input: [kgmident_fieldname]}
        
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments
        
        # Create temp files
        kunstwerken_merged = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
        kunstwerken_merged_buffer = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
        peilscheidingen = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
        correcte_peilscheidingen = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
        peilscheidingen_buffer = turtlebase.arcgis.get_random_file_name(workspace_gdb, "")
        
        # Read kunstwerken ids
        log.info('Reading which kunstwerken are available')
        log.info('Stuwen')
        list_stuwen_idents = read_idents(gp, stuwen_input, kstident_fieldname)
        log.info('Gemalen')
        list_gemalen_idents = read_idents(gp, gemalen_input, kgmident_fieldname)
        # Process: merge the kunstwerken into one feature class
        log.info('Samenvoegen kunstwerken')
        gp.Merge_management("%s;%s" % (stuwen_input, gemalen_input), kunstwerken_merged)
        
        
        # Process: Intersect
        log.info('Bepalen vereiste locaties peilscheidingen')
        gp.Intersect_analysis("%s #;%s #" %(watergangen_as_input, peilvakken_input), peilscheidingen, "ALL", "", "POINT")
        
        
        # Create a unique ident for each peilscheiding
        peilscheidingident_fieldname = 't_id'
        if turtlebase.arcgis.is_fieldname(gp, peilscheidingen, peilscheidingident_fieldname) == False:
            gp.Addfield_management(peilscheidingen, peilscheidingident_fieldname, 'TEXT')
        # Populate ident peilscheidingen 
        populate_ident(gp, peilscheidingen, peilscheidingident_fieldname)
        
        # Process: buffer the kunstwerken with the user-supplied distance
        log.info('Bufferen peilscheidingen')
        afstand_input_value = "%s Meters" % afstand_input
        gp.Buffer_analysis(peilscheidingen, peilscheidingen_buffer, afstand_input_value)

        # Process: Intersect (2)
        log.info('Controle aanwezige peilscheidingen')
        gp.Intersect_analysis("%s #;%s #" %(kunstwerken_merged, peilscheidingen_buffer), correcte_peilscheidingen, "ALL", "", "POINT")
        
        # Temporary step:
        
        log.info('Selecteren van locaties waar verwachte peilscheiding niet aanwezig')
        
        list_peilscheidingen_idents = read_idents(gp, correcte_peilscheidingen, peilscheidingident_fieldname) 
        
        where_clause_peilscheiding_vereist = create_where_clause_peilscheiding_vereist(gp, peilscheidingen, peilscheidingident_fieldname, list_peilscheidingen_idents)
        gp.Select_analysis(peilscheidingen, output_peilscheiding_vereist, where_clause_peilscheiding_vereist)
        
        log.info('Selecteren van kunstwerken die niet op een peilscheiding liggen')
        # Create a list of kunstwerken that lie on a peilscheiding
        list_kst_met_peilscheiding = read_idents(gp, correcte_peilscheidingen, kstident_fieldname)
        list_kgm_met_peilscheiding = read_idents(gp, correcte_peilscheidingen, kgmident_fieldname)
        list_kunstwerken_met_peilscheiding = list_kgm_met_peilscheiding + list_kst_met_peilscheiding
        
        
        where_clause_kw_zonder_peilscheiding = create_where_clause_kunstwerken_zonder_peilscheiding(gp, kunstwerken_merged, kstident_fieldname, kgmident_fieldname, list_kunstwerken_met_peilscheiding)
        
        gp.Select_analysis(kunstwerken_merged, output_kunstwerken_zonder_peilscheiding, where_clause_kw_zonder_peilscheiding)
        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            #gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
        
        
        
        
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
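# read_idents() is not part of this listing. A minimal sketch, assuming it only
# collects the values of a single field into a list using the same gp cursor API
# seen above; read_idents_sketch is a hypothetical name, not the original helper.
def read_idents_sketch(gp, feature_class, fieldname):
    """Return a list with every value of `fieldname` found in `feature_class`."""
    idents = []
    rows = gp.SearchCursor(feature_class)
    for row in nens.gp.gp_iterator(rows):
        value = row.GetValue(fieldname)
        if value is not None:
            idents.append(value)
    return idents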
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 5:
            input_watergangen = sys.argv[1]
            procent = sys.argv[2]
            max_distance = sys.argv[3]
            output_profiles = sys.argv[4]
        else:
            log.warning("usage: <input_watergangen> <procent> <max_distance> <output_profiles>")
            sys.exit(1)
        
        percentage = float(procent) / 100

        gp.CreateFeatureclass_management(os.path.dirname(output_profiles), os.path.basename(output_profiles), "POINT")
        gp.AddField_management(output_profiles, "LOCIDENT", "TEXT")
        gp.AddField_management(output_profiles, "PROIDENT", "TEXT")
    
        in_rows = gp.SearchCursor(input_watergangen)
        in_row = in_rows.Next()
        out_rows = gp.InsertCursor(output_profiles)
        pnt = gp.CreateObject("Point")
        
        inDesc = gp.describe(input_watergangen)
        log.info("draw profiles")
        while in_row:
            ident = in_row.GetValue("OVKIDENT")
            log.info("- %s" % ident)
        
            feat = in_row.GetValue(inDesc.ShapeFieldName)
        
            lengte = feat.length
        
            part = feat.getpart(0)
            pnt_list = [(float(p.x), float(p.y)) for p in nens.gp.gp_iterator(part)]  # use 'p', not 'pnt', to avoid rebinding the insert Point object
        
            XY = calculate_sp(percentage, max_distance, lengte, pnt_list)
            pnt.X = XY[0]
            pnt.Y = XY[1]
        
            out_row = out_rows.newRow()
            out_row.shape = pnt
            out_row.setValue("PROIDENT", ident)
            out_row.setValue("LOCIDENT", "%s_a" % ident)
            out_rows.insertRow(out_row)
        
            pnt_list.reverse()
            XY = calculate_sp(percentage, max_distance, lengte, pnt_list)
            pnt.X = XY[0]
            pnt.Y = XY[1]
        
            out_row = out_rows.newRow()
            out_row.shape = pnt
            out_row.setValue("PROIDENT", ident)
            out_row.setValue("LOCIDENT", "%s_b" % ident)
            out_rows.insertRow(out_row)
            in_row = in_rows.Next()
        
        del out_rows
        del in_rows
        
        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
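# calculate_sp() is not included in this listing. A minimal sketch, assuming it walks
# the vertex list and returns the (x, y) point that lies at
# min(percentage * lengte, max_distance) measured along the line from its first
# vertex; calculate_sp_sketch is a hypothetical name, not the original helper.
def calculate_sp_sketch(percentage, max_distance, lengte, pnt_list):
    target = min(percentage * lengte, float(max_distance))
    travelled = 0.0
    for (x1, y1), (x2, y2) in zip(pnt_list[:-1], pnt_list[1:]):
        segment = ((x2 - x1) ** 2 + (y2 - y1) ** 2) ** 0.5
        if segment > 0 and travelled + segment >= target:
            fraction = (target - travelled) / segment  # interpolate within this segment
            return (x1 + fraction * (x2 - x1), y1 + fraction * (y2 - y1))
        travelled += segment
    return pnt_list[-1]  # target lies beyond the line: fall back to the last vertex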
Ejemplo n.º 28
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        nodig voor deze tool:
        """
        if len(sys.argv) == "<number of arguments for this tool>":
            argument1 = sys.argv[1]
        else:
            log.warning("usage: <argument1> <argument2>")
            #sys.exit(1)

        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)

        #"<check geometry from input data, append to list if incorrect>"

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data,
        #        append them to list if missing>
        check_fields = {}
        #check_fields = {input_1: [fieldname1, fieldname2],
        #                 input_2: [fieldname1, fieldname2]}
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, "turtle-settings.ini")
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        # ----------------------------------------------------------------------------------------
        # check inputfields
        log.info("Getting commandline parameters")
        if len(sys.argv) == 6:
            rr_peilgebieden_tbl = sys.argv[1]
            rr_oppervlak_tbl = sys.argv[2]
            rr_toetspunten_tbl = sys.argv[3]
            rr_resultaten_tbl = sys.argv[4]
            output_waterbezwaar_tbl = sys.argv[5]
        else:
            log.error(
                "Usage: python rural_indicatie_waterbezwaar.py <rr_peilgebieden_tbl> <rr_oppervlak_tbl> <rr_toetspunten_tbl> <rr_resultaten_tbl> <output_waterbezwaar_tbl>"
            )
            sys.exit(1)
        # ----------------------------------------------------------------------------------------
        # check input parameters
        log.info("Checking presence of input files")
        if not (gp.exists(rr_peilgebieden_tbl)):
            log.error("tabel %s does not exist!" % rr_peilgebieden_tbl)
            sys.exit(5)
        if not (gp.exists(rr_oppervlak_tbl)):
            log.error("tabel %s does not exist!" % rr_oppervlak_tbl)
            sys.exit(5)
        if not (gp.exists(rr_toetspunten_tbl)):
            log.error("tabel %s does not exist!" % rr_toetspunten_tbl)
            sys.exit(5)
        if not (gp.exists(rr_resultaten_tbl)):
            log.error("tabel %s does not exist!" % rr_resultaten_tbl)
            sys.exit(5)
        log.info("input parameters checked")

        # ---------------------------------------------------------------------------
        gpgident = config.get("GENERAL", "gpgident").lower()

        # create list from geodatabase table
        gegevens = nens.gp.get_table(gp, rr_resultaten_tbl, primary_key=gpgident)
        nens.gp.join_on_primary_key(gp, gegevens, rr_toetspunten_tbl, gpgident)
        nens.gp.join_on_primary_key(gp, gegevens, rr_peilgebieden_tbl, gpgident)
        nens.gp.join_on_primary_key(gp, gegevens, rr_oppervlak_tbl, gpgident)

        # calculating waterbezwaar
        log.info("calculating surplus water")

        # check input fields
        check_row = gegevens.values()[0]
        check_fields = [
            config.get("waterbezwaar", "toetspunt_overlast_stedelijk"),
            config.get("waterbezwaar", "toetspunt_overlast_hoogwlandbouw"),
            config.get("waterbezwaar", "toetspunt_overlast_akkerbouw"),
            config.get("waterbezwaar", "toetspunt_overlast_grasland"),
            config.get("waterbezwaar", "toetspunt_inundatie_stedelijk"),
            config.get("waterbezwaar", "toetspunt_inundatie_hoogwlandbouw"),
            config.get("waterbezwaar", "toetspunt_inundatie_akkerbouw"),
            config.get("waterbezwaar", "toetspunt_inundatie_grasland"),
            config.get("waterbezwaar", "peilgebied_winterpeil"),
            config.get("waterbezwaar", "peilgebied_helling"),
            config.get("waterbezwaar", "waterstand_inundatie_stedelijk"),
            config.get("waterbezwaar", "waterstand_inundatie_hoogwlandbouw"),
            config.get("waterbezwaar", "waterstand_inundatie_akkerbouw"),
            config.get("waterbezwaar", "waterstand_inundatie_grasland"),
            config.get("waterbezwaar", "waterstand_overlast_stedelijk"),
            config.get("waterbezwaar", "waterstand_overlast_hoogwlandbouw"),
            config.get("waterbezwaar", "waterstand_overlast_akkerbouw"),
            config.get("waterbezwaar", "waterstand_overlast_grasland"),
            config.get("waterbezwaar", "oppervlak_openwater"),
        ]
        missing_fields = check_items(check_row, check_fields)
        if missing_fields:
            log.error(
                "at least one of the input fields is missing, check ini-file and database. %s" % (str(missing_fields))
            )
            sys.exit(6)

        waterbezwaar = {}
        for id, row in gegevens.items():
            if row["tldhelling"] == None:
                tp_slope = 0
            else:
                tp_slope = float(row["tldhelling"])

            if row["winterpeil"] == None:
                winterpeil = 0
            else:
                winterpeil = float(row["winterpeil"])

            if row["openwat_ha"] == None:
                ow_opp = 0
            else:
                ow_opp = float(row["openwat_ha"])

            # calculate waterbezwaar (surplus water), inundation, urban
            toetshoogte_i_st = float(row["mtgmv_i_st"])
            if toetshoogte_i_st == winterpeil:
                toetshoogte_i_st = toetshoogte_i_st + 0.05
                log.warning("Toetspunt inundatie stedelijk is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_i_st = float(row["ws_100"])
            sted_i_wb_m3, sted_i_wb_ha = calc_waterbezwaar(
                toetshoogte_i_st, ow_opp, tp_slope, winterpeil, waterstand_i_st
            )

            # calculate waterbezwaar, nuisance (overlast), urban
            toetshoogte_o_st = float(row["mtgmv_o_st"])
            if toetshoogte_o_st == winterpeil:
                toetshoogte_o_st = toetshoogte_o_st + 0.05
                log.warning("Toetspunt overlast stedelijk is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_o_st = float(row["ws_25"])
            sted_o_wb_m3, sted_o_wb_ha = calc_waterbezwaar(
                toetshoogte_o_st, ow_opp, tp_slope, winterpeil, waterstand_o_st
            )

            # calculate waterbezwaar, inundation, high-value agriculture
            toetshoogte_i_hl = float(row["mtgmv_i_hl"])
            if toetshoogte_i_hl == winterpeil:
                toetshoogte_i_hl = toetshoogte_i_hl + 0.05
                log.warning("Toetspunt inundatie hoogwaardige landbouw is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_i_hl = float(row["ws_50"])
            hoogw_i_wb_m3, hoogw_i_wb_ha = calc_waterbezwaar(
                toetshoogte_i_hl, ow_opp, tp_slope, winterpeil, waterstand_i_hl
            )

            # calculate waterbezwaar for nuisance, high-value agriculture
            toetshoogte_o_hl = float(row["mtgmv_o_hl"])
            if toetshoogte_o_hl == winterpeil:
                toetshoogte_o_hl = toetshoogte_o_hl + 0.05
                log.warning("Toetspunt overlast hoogwaardige landbouw is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_o_hl = float(row["ws_25"])
            hoogw_o_wb_m3, hoogw_o_wb_ha = calc_waterbezwaar(
                toetshoogte_o_hl, ow_opp, tp_slope, winterpeil, waterstand_o_hl
            )

            # calculate waterbezwaar for inundation, arable land (akkerbouw)
            toetshoogte_i_ak = float(row["mtgmv_i_ak"])
            if toetshoogte_i_ak == winterpeil:
                toetshoogte_i_ak = toetshoogte_i_ak + 0.05
                log.warning("Toetspunt inundatie akkerbouw is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_i_ak = float(row["ws_25"])
            akker_i_wb_m3, akker_i_wb_ha = calc_waterbezwaar(
                toetshoogte_i_ak, ow_opp, tp_slope, winterpeil, waterstand_i_ak
            )

            # calculate waterbezwaar for nuisance, arable land
            toetshoogte_o_ak = float(row["mtgmv_o_ak"])
            if toetshoogte_o_ak == winterpeil:
                toetshoogte_o_ak = toetshoogte_o_ak + 0.05
                log.warning("Toetspunt overlast akkerbouw is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_o_ak = float(row["ws_15"])
            akker_o_wb_m3, akker_o_wb_ha = calc_waterbezwaar(
                toetshoogte_o_ak, ow_opp, tp_slope, winterpeil, waterstand_o_ak
            )

            # calculate waterbezwaar for inundation, grassland
            toetshoogte_i_gr = float(row["mtgmv_i_gr"])
            if toetshoogte_i_gr == winterpeil:
                toetshoogte_i_gr = toetshoogte_i_gr + 0.05
                log.warning("Toetspunt inundatie grasland is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_i_gr = float(row["ws_10"])
            gras_i_wb_m3, gras_i_wb_ha = calc_waterbezwaar(
                toetshoogte_i_gr, ow_opp, tp_slope, winterpeil, waterstand_i_gr
            )

            # calculate waterbezwaar for nuisance, grassland
            toetshoogte_o_gr = float(row["mtgmv_o_gr"])
            if toetshoogte_o_gr == winterpeil:
                toetshoogte_o_gr = toetshoogte_o_gr + 0.05
                log.warning("Toetspunt overlast grasland is gelijk aan winterpeil, toetspunt + 5cm")
            waterstand_o_gr = float(row["ws_5"])
            gras_o_wb_m3, gras_o_wb_ha = calc_waterbezwaar(
                toetshoogte_o_gr, ow_opp, tp_slope, winterpeil, waterstand_o_gr
            )
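            # Sketch (not part of the original tool): the six blocks above only
            # differ in the toetspunt field and the return-period water level,
            # so, assuming calc_waterbezwaar keeps the signature used above,
            # they could be driven by a small lookup table:
            #
            #   scenarios = {
            #       'i_st': ('mtgmv_i_st', 'ws_100'),
            #       'o_st': ('mtgmv_o_st', 'ws_25'),
            #       'i_hl': ('mtgmv_i_hl', 'ws_50'),
            #       'o_hl': ('mtgmv_o_hl', 'ws_25'),
            #       'i_ak': ('mtgmv_i_ak', 'ws_25'),
            #       'o_ak': ('mtgmv_o_ak', 'ws_15'),
            #       'i_gr': ('mtgmv_i_gr', 'ws_10'),
            #       'o_gr': ('mtgmv_o_gr', 'ws_5'),
            #   }
            #   results = {}
            #   for key, (tp_field, ws_field) in scenarios.items():
            #       toetshoogte = float(row[tp_field])
            #       if toetshoogte == winterpeil:
            #           toetshoogte += 0.05
            #       results[key] = calc_waterbezwaar(
            #           toetshoogte, ow_opp, tp_slope, winterpeil, float(row[ws_field]))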

            wb_i_m3 = max(sted_i_wb_m3, hoogw_i_wb_m3, akker_i_wb_m3, gras_i_wb_m3)
            wb_o_m3 = max(sted_o_wb_m3, hoogw_o_wb_m3, akker_o_wb_m3, gras_o_wb_m3)
            wb_i_ha = max(sted_i_wb_ha, hoogw_i_wb_ha, akker_i_wb_ha, gras_i_wb_ha)
            wb_o_ha = max(sted_o_wb_ha, hoogw_o_wb_ha, akker_o_wb_ha, gras_o_wb_ha)

            waterbezwaar[id] = {
                "gpgident": id,
                "wb_i_m3": wb_i_m3,
                "wb_o_m3": wb_o_m3,
                "wb_i_ha": wb_i_ha,
                "wb_o_ha": wb_o_ha,
                "wb_i_st_ha": sted_i_wb_ha,
                "wb_i_hl_ha": hoogw_i_wb_ha,
                "wb_i_ak_ha": akker_i_wb_ha,
                "wb_i_gr_ha": gras_i_wb_ha,
                "wb_o_st_ha": sted_o_wb_ha,
                "wb_o_hl_ha": hoogw_o_wb_ha,
                "wb_o_ak_ha": akker_o_wb_ha,
                "wb_o_gr_ha": gras_o_wb_ha,
                "wb_i_st_m3": sted_i_wb_m3,
                "wb_i_hl_m3": hoogw_i_wb_m3,
                "wb_i_ak_m3": akker_i_wb_m3,
                "wb_i_gr_m3": gras_i_wb_m3,
                "wb_o_st_m3": sted_o_wb_m3,
                "wb_o_hl_m3": hoogw_o_wb_m3,
                "wb_o_ak_m3": akker_o_wb_m3,
                "wb_o_gr_m3": gras_o_wb_m3,
            }

        # Write the results to a new output table
        if not (gp.exists(output_waterbezwaar_tbl)):
            log.info("creating table " + output_waterbezwaar_tbl)
            gp.CreateTable(os.path.dirname(output_waterbezwaar_tbl), os.path.basename(output_waterbezwaar_tbl))
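        # the freshly created table has no attribute fields yet; the id field
        # and the DOUBLE result fields are added further down when missing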

        wb_fields = [
            config.get("waterbezwaar", "output_field_wb_i_m3"),
            config.get("waterbezwaar", "output_field_wb_i_ha"),
            config.get("waterbezwaar", "output_field_wb_o_m3"),
            config.get("waterbezwaar", "output_field_wb_o_ha"),
            config.get("waterbezwaar", "output_field_wb_i_st_ha"),
            config.get("waterbezwaar", "output_field_wb_i_hl_ha"),
            config.get("waterbezwaar", "output_field_wb_i_ak_ha"),
            config.get("waterbezwaar", "output_field_wb_i_gr_ha"),
            config.get("waterbezwaar", "output_field_wb_o_st_ha"),
            config.get("waterbezwaar", "output_field_wb_o_hl_ha"),
            config.get("waterbezwaar", "output_field_wb_o_ak_ha"),
            config.get("waterbezwaar", "output_field_wb_o_gr_ha"),
            config.get("waterbezwaar", "output_field_wb_i_st_m3"),
            config.get("waterbezwaar", "output_field_wb_i_hl_m3"),
            config.get("waterbezwaar", "output_field_wb_i_ak_m3"),
            config.get("waterbezwaar", "output_field_wb_i_gr_m3"),
            config.get("waterbezwaar", "output_field_wb_o_st_m3"),
            config.get("waterbezwaar", "output_field_wb_o_hl_m3"),
            config.get("waterbezwaar", "output_field_wb_o_ak_m3"),
            config.get("waterbezwaar", "output_field_wb_o_gr_m3"),
        ]

        table_def = nens.gp.get_table_def(gp, output_waterbezwaar_tbl)
        output_field_id = config.get("waterbezwaar", "output_field_id")
        if not output_field_id in table_def:
            log.info(" - add field %s to %s" % (output_field_id, os.path.basename(output_waterbezwaar_tbl)))
            gp.AddField(output_waterbezwaar_tbl, output_field_id, "TEXT", "#", "#", 30)

        for double_field in wb_fields:
            if not double_field.lower() in table_def:
                log.info(" - add field %s to %s" % (double_field, os.path.basename(output_waterbezwaar_tbl)))
                gp.AddField(output_waterbezwaar_tbl, double_field, "DOUBLE")

        turtlebase.arcgis.write_result_to_output(output_waterbezwaar_tbl, gpgident, waterbezwaar)
        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Ejemplo n.º 30
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        #---------------------------------------------------------------------
        # Input parameters
        """
        nodig voor deze tool:
        """
        if len(sys.argv) == 3:
            sbk_case = sys.argv[1]
            output_gdb = sys.argv[2]            
        else:
            log.warning("usage: <sobek_case_folder> <output_gdb>")
            sys.exit(1)

        if not os.path.isabs(output_gdb):
            log.error("%s is geen juiste outputlocatie" % output_gdb)
            raise OutputError()
        if os.path.dirname(output_gdb).endswith(".gdb"):
            log.error("%s is geen juiste outputlocatie (geodatabase in een geodatabase)" % output_gdb)
            raise OutputError()
        if not output_gdb.endswith(".gdb"):
            output_gdb = output_gdb + ".gdb"
            
        script_path = os.path.dirname(sys.argv[0])
        
        #Copy default HydroBase
        hydrobase_cf = os.path.join(script_path, "hydrobases", "HydroBaseCF.gdb")
        log.info(" - copy default hydrobase")
        arcpy.Copy_management(hydrobase_cf, output_gdb, "Workspace")
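        # the feature classes below are appended into this copied template with
        # the NO_TEST schema option, i.e. fields are matched by name against
        # the HydroBaseCF schema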
        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_files = []

        check_files = ['network.ntw', 'boundary.dat', 'profile.dat', 'profile.def', 'struct.dat', 'struct.def', 'initial.dat', 'friction.dat', 'control.def']
        for check_file in check_files:
            if not os.path.isfile(os.path.join(sbk_case, check_file)):
                missing_files.append(check_file)

        if len(missing_files) > 0:
            log.error("missing files in sobek directory: %s" % missing_files)
            sys.exit(2)
        #---------------------------------------------------------------------
        time_str = time.strftime("%d/%m/%Y %H:%M:%S")
        log.info("Read Sobek Network file")
        sobek_network_dict = nens.sobek.Network(os.path.join(sbk_case, 'network.ntw'))
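        # Inferred from the usage below: link records (SBK_CHANNEL, 3B_LINK)
        # are (ident, from_node, to_node) tuples in which from_node/to_node are
        # (node_type, node_id) pairs, while node records are (ident, x, y)
        # coordinates; the node coordinates are collected in network_coords so
        # the link geometries can be drawn between their end nodes.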

        available_types = []
        for id, x, y in sobek_network_dict['SBK_CHANNEL']:
            if x[0] not in available_types:
                available_types.append(x[0])
            if y[0] not in available_types:
                available_types.append(y[0])

        if '3B_LINK' in sobek_network_dict:
            RR_Network = True
            for id, x, y in sobek_network_dict['3B_LINK']:
                if x[0] not in available_types:
                    available_types.append(x[0])
                if y[0] not in available_types:
                    available_types.append(y[0])
        else:
            log.info(" - no RR network found")
            RR_Network = False

        network_coords = {}
        for network_type in available_types:
            for id, x, y in sobek_network_dict[network_type]:
                network_coords[id] = (x, y)

        #---------------------------------------------------------------------    
        #RR Network
        if RR_Network:
            log.info("Read RR Features")
            rr_nodes = shapefile.Writer(shapefile.POINT)
            rr_nodes.field('GPGIDENT')
            rr_nodes.field('SBKIDENT')
            rr_nodes.field('TYPE')

            for node_type in ('3B_UNPAVED', '3B_PAVED', '3B_GREENHOUSE'):
                if node_type in available_types:
                    for ident, x, y in sobek_network_dict[node_type]:
                        rr_nodes.point(float(x), float(y))
                        rr_nodes.record(ident, ident, node_type)

            rr_nodes_shp = os.path.join(workspace, 'rr_nodes.shp')
            rr_nodes.save(rr_nodes_shp)

            #RRCF Connection Nodes
            rrcf_connections = shapefile.Writer(shapefile.POINT)            
            rrcf_connections.field('KPIDENT')
            rrcf_connections.field('SBKIDENT')
            rrcf_connections.field('SBKTYPE')
            
            for i, (ident, x, y) in enumerate(sobek_network_dict['SBK_SBK-3B-REACH']):
                rrcf_connections.point(float(x), float(y))
                rrcf_connections.record(ident, ident, 'SBK_SBK-3B-REACH')
                
            rrcf_connections_shp = os.path.join(workspace, "rrcf_connections.shp")
            rrcf_connections.save(rrcf_connections_shp)

            # RR Network
            rr_line = shapefile.Writer(shapefile.POLYLINE)
            rr_line.field('RRIDENT')
            rr_line.field('FROM_POINT')
            rr_line.field('FROM_TYPE')
            rr_line.field('TO_POINT')
            rr_line.field('TO_TYPE')

            for ident, from_node, to_node in sobek_network_dict['3B_LINK']:
                x1, y1 = network_coords[from_node[1]]
                x2, y2 = network_coords[to_node[1]]
                rr_line.line(parts=[[[float(x1), float(y1)], [float(x2), float(y2)]]])
                rr_line.record(ident, from_node[1], from_node[0], to_node[1], to_node[0])
                
            rr_lines_shp = os.path.join(workspace, "rr_lines.shp")
            rr_line.save(rr_lines_shp)

            #append rr features
            log.info(" - append rrcf_connections to hydrobase")
            arcpy.Append_management(rrcf_connections_shp, os.path.join(output_gdb, "RR_features", "RRCF_connections"), "NO_TEST")
            log.info(" - append rr_nodes to hydrobase")
            arcpy.Append_management(rr_nodes_shp, os.path.join(output_gdb, "RR_features", "RR_nodes"), "NO_TEST")
            log.info(" - append rr_network to hydrobase")
            arcpy.Append_management(rr_lines_shp, os.path.join(output_gdb, "RR_features", "RR_network"), "NO_TEST")
        else:
            log.info(" - RR Features skipped")
        #---------------------------------------------------------------------
        #Sobek CF Database:
        log.info("Read CF Features")
        # CF Network
        # - channel
        log.info(' - copy channels')
        channel = shapefile.Writer(shapefile.POLYLINE)
        channel.field('OVKIDENT')
        channel.field('FROM_POINT')
        channel.field('FROM_TYPE')
        channel.field('TO_POINT')
        channel.field('TO_TYPE')

        for ident, from_node, to_node in sobek_network_dict['SBK_CHANNEL']:
            x1, y1 = network_coords[from_node[1]]
            x2, y2 = network_coords[to_node[1]]
            channel.line(parts=[[[float(x1), float(y1)], [float(x2), float(y2)]]])
            channel.record(ident, from_node[1], from_node[0], to_node[1], to_node[0])
            
        channel_shp = os.path.join(workspace, "channel.shp")
        channel.save(channel_shp)

        #append channels
        log.info(" - append channels to hydrobase")
        append_to_hydrobase(channel_shp, os.path.join(output_gdb, "Channel", "Channel"))
        
        #boundary_dat = nens.sobek.File(os.path.join(sbk_case, 'boundary.dat'))
        #initial_dat = nens.sobek.File(os.path.join(sbk_case, 'initial.dat'))
        #friction_dat = nens.sobek.File(os.path.join(sbk_case, 'friction.dat'))
        #control_def = nens.sobek.File(os.path.join(sbk_case, 'control.def'))
        #lateral_dat = nens.sobek.File(os.path.join(sbk_case, 'lateral.dat'))        
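        # profile.dat (CRSN records) couples each cross-section location to a
        # definition id (di) in profile.def and carries a reference level (rl)
        # that is used further down to shift the definition to an absolute bed
        # level.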

        profiles = {}
        log.info(' - read profile.dat')
        prof_dat = nens.sobek.File(os.path.join(sbk_case, 'profile.dat'))
        for profile in prof_dat['CRSN']:
            profiles[profile['id'][0]] = {'id': profile['id'][0], 'def_id': profile['di'][0], 'ref_level': profile['rl'][0], 'ref_surface': profile['rs'][0]}

        cross_section_definition_csv = os.path.join(workspace, "cross_section_definition.csv")
        add_to_csv(cross_section_definition_csv, [('PROIDENT', 'TYPE', 'BED_LVL', 'BED_WDTH', 'BED_WDTH_M',
                                                    'WAT_LVL', 'WAT_WDTH', 'WAT_WDTH_M', 'SUR_LVL', 'SUR_WDTH', 'SUR_WDTH_M',
                                                    'TALUD', 'DIAMETER', 'WIDTH', 'HEIGHT', 'SOURCE', 'DATE_TIME', 'COMMENTS')], "wb")
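        # the definition csv and the yz csv are filled per profile below and
        # appended into the hydrobase tables at the end of the cross-section
        # block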
        profiles_def = {}
        log.info(' - read profile.def')
        prof_def = nens.sobek.File(os.path.join(sbk_case, 'profile.def'))
        cross_section_yz_csv = os.path.join(workspace, "cross_section_yz.csv")
        add_to_csv(cross_section_yz_csv, [('PROIDENT', 'DIST_MID', 'BED_LVL')], "wb")
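        # profile.def (CRDS records) types handled below: ty=1 trapezium,
        # ty=0 tabulated (converted to a yz table unless it is the standard
        # three-row table), ty=10 yz profile, ty=4 round; anything else is
        # stored as 'overig'.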
        for profile_def in prof_def['CRDS']:
            min_lvl = 9999
            talud = wat_lvl = wat_wdth = wat_wdth_m = bed_wdth = bed_wdth_m = sur_lvl = sur_wdth = sur_wdth_m = NO_DATA_VALUE
            if profile_def['ty'][0] == 1:
                proftype = 'trapezium'
                talud = profile_def['bs'][0]
                bed_wdth = profile_def['bw'][0]
                sur_wdth = profile_def['aw'][0]
            elif profile_def['ty'][0] == 10:
                proftype = 'yz profiel'
                yz_tabel = profile_def['lt yz'][0]
                
                for i in range(yz_tabel.rows()):
                    add_to_csv(cross_section_yz_csv, [(profile_def['id'][0], yz_tabel[i, 0], yz_tabel[i, 1])], "ab")
                    if yz_tabel[i, 1] < min_lvl:
                        min_lvl = yz_tabel[i, 1]
                
            elif profile_def['ty'][0] == 0:
                proftype = 'tabulated'
                lw_tabel = profile_def['lt lw'][0]
                if lw_tabel.rows() == lw_tabel.cols() == 3:
                    min_lvl = lw_tabel[0, 0]
                    bed_wdth = lw_tabel[0, 1]
                    bed_wdth_m = lw_tabel[0, 2]
                    wat_lvl = lw_tabel[1, 0]
                    wat_wdth = lw_tabel[1, 1]
                    wat_wdth_m = lw_tabel[1, 2]
                    sur_lvl = lw_tabel[2, 0]
                    sur_wdth = lw_tabel[2, 1]
                    sur_wdth_m = lw_tabel[2, 2]                    
                else:
                    for i in range(lw_tabel.rows()):
                        dist_mid = float(lw_tabel[i, 1]) / 2
                        zcoord = float(lw_tabel[i, 0])
                        add_to_csv(cross_section_yz_csv, [(profile_def['id'][0], dist_mid, zcoord)], "ab")
                        add_to_csv(cross_section_yz_csv, [(profile_def['id'][0], 0 - dist_mid, zcoord)], "ab")
                        if lw_tabel[i, 0] < min_lvl:
                            min_lvl = lw_tabel[i, 0]
                    proftype = 'yz_profiel'
            elif profile_def['ty'][0] == 4:
                proftype = 'rond'
                wat_wdth = float(profile_def['rd'][0]) * 2            
                
            else:
                proftype = 'overig: %s' % profile_def['ty'][0]

            profiles_def[profile_def['id'][0]] = {'id': profile_def['id'][0], "type": proftype, 'talud': talud,
                                                  'bed_wdth': bed_wdth, 'bed_wdth_m': bed_wdth_m, 'sur_lvl': sur_lvl, 'sur_wdth': sur_wdth, 'sur_wdth_m': sur_wdth_m,
                                                  'wat_lvl': wat_lvl, 'wat_wdth': wat_wdth, 'wat_wdth_m': wat_wdth_m, 'min_lvl': min_lvl}

        
        for profile, values in profiles.items():
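            # combine the location (profile.dat) with its definition
            # (profile.def): when a lowest table point was found, shift it by
            # the reference level to obtain an absolute bed level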
            def_id = values['def_id']
            min_lvl = profiles_def[def_id]['min_lvl']
            if min_lvl != 9999:
                # cross section level shift
                bed_lvl = float(values['ref_level']) + float(min_lvl)
            else:
                bed_lvl = float(values['ref_level'])            
            
            add_to_csv(cross_section_definition_csv, [(profile, profiles_def[def_id]['type'], bed_lvl, profiles_def[def_id]['bed_wdth'], profiles_def[def_id]['bed_wdth_m'],
                                                       profiles_def[def_id]['wat_lvl'], profiles_def[def_id]['wat_wdth'], profiles_def[def_id]['wat_wdth_m'],
                                                       profiles_def[def_id]['sur_lvl'], profiles_def[def_id]['sur_wdth'], profiles_def[def_id]['sur_wdth_m'],
                                                       profiles_def[def_id]['talud'], NO_DATA_VALUE, NO_DATA_VALUE, NO_DATA_VALUE, sbk_case, time_str, "")], "ab")
            
        if 'SBK_PROFILE' in available_types:
            log.info(' - copy cross sections')
            location_shp = os.path.join(workspace, "location.shp")
            location = shapefile.Writer(shapefile.POINT)
                                        
            location_fields = ['LOCIDENT', 'PROIDENT', 'TYPE', 'X_COORD', 'Y_COORD', 'SOURCE', 'DATE_TIME', 'COMMENTS']
            for loc_field in location_fields:
                location.field(loc_field)

            for i, (ident, x, y) in enumerate(sobek_network_dict['SBK_PROFILE']):
                def_id = profiles[ident]['def_id']
                if def_id in profiles_def:
                    location.point(float(x), float(y))
                    
                    proftype = profiles_def[def_id]['type']
                    location.record(ident, ident, proftype, float(x), float(y), sbk_case, time_str, "")
                
            location.save(location_shp)        

            #append data to hydrobase
            log.info(" - append cross sections to hydrobase")
            append_to_hydrobase(location_shp, os.path.join(output_gdb, "Cross_sections", "locations"))
            log.info(" - append cross sections definitions to hydrobase")
            arcpy.Append_management(cross_section_definition_csv, os.path.join(output_gdb, "Cross_section_definition"), "NO_TEST")
            log.info(" - append cross sections yz-table to hydrobase")
            arcpy.Append_management(cross_section_yz_csv, os.path.join(output_gdb, "Cross_section_yz"), "NO_TEST")

        else:
            log.warning(" - no cross sections found ")

        # Structures
        structures = {}
        log.info(' - read struct.dat')
        struct_dat = nens.sobek.File(os.path.join(sbk_case, 'struct.dat'))
        for structure in struct_dat['STRU']:
            structures[structure['id'][0]] = {'id': structure['id'][0], 'def_id': structure['dd'][0], 'nm': structure['nm'][0]}

        culvert_def = {}
        weir_def = {}
        pump_def = {}
        
        log.info(' - read struct.def')
        struct_def = nens.sobek.File(os.path.join(sbk_case, 'struct.def'))
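        # struct.def (STDS records) types handled below: ty=6 weir, ty=9 pump
        # (taking the first row of the 'ct lt' table), ty=10 culvert.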
        for structure_def in struct_def['STDS']:
            if structure_def['ty'][0] == 6:
                weir_def[structure_def['id'][0]] = {'id': structure_def['id'][0], 'name': structure_def['nm'][0], 'crest_lvl': structure_def['cl'][0], 'crest_wdth': structure_def['cw'][0], 'dis_coef': structure_def['ce'][0]}
            elif structure_def['ty'][0] == 9:
                pump_tble = structure_def['ct lt'][1]
                capacity = pump_tble[0,0]
                suc_start = pump_tble[0,1]
                suc_stop = pump_tble[0,2]
                prs_start = pump_tble[0,3]
                prs_stop = pump_tble[0,4]
                pump_def[structure_def['id'][0]] = {'id': structure_def['id'][0], 'name': structure_def['nm'][0], 'capacity': capacity, 'suc_start': suc_start, 'suc_stop': suc_stop, 'prs_start': prs_start, 'prs_stop': prs_stop}
            elif structure_def['ty'][0] == 10:
                culvert_def[structure_def['id'][0]] = {'id': structure_def['id'][0], 'name': structure_def['nm'][0], 'bed_lvl_1': structure_def['ll'][0], 'bed_lvl_2': structure_def['rl'][0],
                                                      'lengte': structure_def['dl'][0], 'inlet_loss': structure_def['li'][0], 'outlet_loss': structure_def['lo'][0], 'profile_ident': structure_def['si'][0]}                  

        # - culvert
        if 'SBK_CULVERT' in available_types:
            log.info(' - copy culverts')
            culvert = shapefile.Writer(shapefile.POINT)
            culvert_fields = ['KWKIDENT', 'KWK_NAAM', 'TYPE', 'DIAMETER', 'WIDTH', 'HEIGHT', 'LENGTH', 'BED_LVL_1', 'BED_LVL_2', 'FRICTION', 'INLET_LOSS', 'OUTLET_LOS', 'SOURCE']
            for c_field in culvert_fields:
                culvert.field(c_field)

            for i, (ident, x, y) in enumerate(sobek_network_dict['SBK_CULVERT']):
                width = height = diameter = NO_DATA_VALUE
                culvert_def_id = structures[ident]['def_id']
                culvert_name = culvert_def[culvert_def_id]['name']
                profile_ident = culvert_def[culvert_def_id]['profile_ident']

                if profile_ident in profiles_def:
                    culvert.point(float(x), float(y))
                    profile_type = profiles_def[profile_ident]['type']
                    if profile_type == 'tabulated':
                        culvert_type = 'rechthoek'
                        width = profiles_def[profile_ident]['bed_wdth']
                        height = profiles_def[profile_ident]['sur_lvl']
                    elif profile_type == 'rond':
                        culvert_type = 'rond'
                        diameter = profiles_def[profile_ident]['wat_wdth']
                    else:
                        culvert_type = 'onbekend'
                                
                    culvert.record(ident, culvert_name, culvert_type, diameter, width, height, culvert_def[culvert_def_id]['lengte'], culvert_def[culvert_def_id]['bed_lvl_1'],
                                   culvert_def[culvert_def_id]['bed_lvl_2'], NO_DATA_VALUE, culvert_def[culvert_def_id]['inlet_loss'],
                                   culvert_def[culvert_def_id]['outlet_loss'], sbk_case)
                else:
                    log.warning("%s heeft geen profiel" % ident)

            culvert_shp = os.path.join(workspace, "culvert.shp")
            culvert.save(culvert_shp)
            #append culverts
            log.info(" - append culverts to hydrobase")
            append_to_hydrobase(culvert_shp, os.path.join(output_gdb, "Structures", "Culvert"))
            
        else:
            log.warning(" - no culverts found ")
            
        # - weir
        if 'SBK_WEIR' in available_types:
            weir = shapefile.Writer(shapefile.POINT)
            weir_fields = ['KWKIDENT', 'KWK_NAME', 'TYPE', 'CREST_WDTH', 'CREST_LVL', 'CREST_SUM', 'CREST_WIN', 'DIS_COEF', 'SOURCE']
            for w_field in weir_fields:
                weir.field(w_field)

            for i, (ident, x, y) in enumerate(sobek_network_dict['SBK_WEIR']):
                weir_def_id = structures[ident]['def_id']
                weir_name = weir_def[weir_def_id]['name']
                weir.point(float(x), float(y))
                weir.record(ident, weir_name, "VAST", weir_def[weir_def_id]['crest_wdth'], weir_def[weir_def_id]['crest_lvl'], NO_DATA_VALUE, NO_DATA_VALUE, weir_def[weir_def_id]['dis_coef'], sbk_case)

            weir_shp = os.path.join(workspace, "weir.shp")
            weir.save(weir_shp)

            #append weirs
            log.info(" - append weirs to hydrobase")
            append_to_hydrobase(weir_shp, os.path.join(output_gdb, "Structures", "Weir"))
        else:
            log.warning(" - no weirs found ")
            
        # - pump stations
        #append pump stations
        pump_def_csv = os.path.join(workspace, "pump_def.csv")
        add_to_csv(pump_def_csv, [('KWKIDENT', 'CAPACITY', 'STAGE', 'SUC_START', 'SUC_STOP',
                                                    'PRS_START', 'PRS_STOP')], "wb")
        
        if 'SBK_PUMP' in available_types:
            pump = shapefile.Writer(shapefile.POINT)
            pump_fields = ['KWKIDENT', 'KWK_NAAM', 'CONTROLLER', 'SOURCE']
            for p_field in pump_fields:
                pump.field(p_field)
            for i, (ident, x, y) in enumerate(sobek_network_dict['SBK_PUMP']):
                pump_def_id = structures[ident]['def_id']
                pump.point(float(x), float(y))
                pump.record(ident, structures[ident]['nm'], "JAAR", sbk_case)
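                # capacity presumably comes from Sobek in m3/s and is converted
                # to m3/h here (assumption, based on the factor 3600)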
                capacity = (pump_def[pump_def_id]['capacity']) * 3600
                add_to_csv(pump_def_csv, [(ident, capacity, 1, pump_def[pump_def_id]['suc_start'],
                                           pump_def[pump_def_id]['suc_stop'], pump_def[pump_def_id]['prs_start'],
                                           pump_def[pump_def_id]['prs_stop'])], "ab")

            pump_shp = os.path.join(workspace, "pump.shp")              
            pump.save(pump_shp)
            
            log.info(" - append pump stations to hydrobase")        
            append_to_hydrobase(pump_shp, os.path.join(output_gdb, "Structures", "Pump_station"))
            log.info(" - append pump station definitions to hydrobase")
            arcpy.Append_management(pump_def_csv, os.path.join(output_gdb, "Pump_station_def"), "NO_TEST")
        else:
            log.warning(" - no pump stations found ")

        # - lateral flows
        if 'SBK_LATERALFLOW' in available_types:
            log.info(' - copy Lateral flow')
            lateral = shapefile.Writer(shapefile.POINT)
            lateral_fields = ['LAT_IDENT', 'LAT_TYPE', 'DISCHARGE', 'AREA', 'SEEPAGE']
            for l_field in lateral_fields:
                lateral.field(l_field)

            for i, (ident, x, y) in enumerate(sobek_network_dict['SBK_LATERALFLOW']):
                lateral.point(float(x), float(y))
                lateral.record(ident, NO_DATA_VALUE, NO_DATA_VALUE, NO_DATA_VALUE, NO_DATA_VALUE)
    
            lateral_shp = os.path.join(workspace, "lateral.shp")
            lateral.save(lateral_shp)

            #append lateral flow
            log.info(" - append lateral flow nodes to hydrobase")
            arcpy.Append_management(lateral_shp, os.path.join(output_gdb, "Model_conditions", "Lateral_Flow"), "NO_TEST")
        else:
            log.warning(" - no lateral flows found ")

        #---------------------------------------------------------------------
        mainutils.log_footer()
    except OutputError:
        sys.exit(1)
        
    except:
        log.exception("")
        sys.exit(1)

    finally:
        logging_config.cleanup()
        arcpy.CheckInExtension("DataInteroperability")
        del gp