Example #1
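The snippets below are the main() functions of several Turtle rural ArcGIS scripts. They rely on module-level setup that is not shown on this page; a minimal sketch of what that scaffolding could look like follows (the import paths of mainutils and LoggingConfig are assumptions, and each script presumably ends with an if __name__ == '__main__': main() guard, omitted here):

import csv
import logging
import os
import sys
import tempfile
import time
import traceback

import nens.gp
import turtlebase.arcgis
import turtlebase.filenames
import turtlebase.risico

# Assumed import locations; the real package layout may differ.
from turtlebase.logutils import LoggingConfig
import mainutils

log = logging.getLogger(__name__)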
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()
        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 11:
            log.info("Reading input parameters")
            peilgebied = sys.argv[1]
            input_rr_peilgebied = sys.argv[2]
            input_rr_maaiveld = sys.argv[3]
            input_ahn = sys.argv[4]
            input_lgn = sys.argv[5]
            conversion = sys.argv[6]
            input_hymstat = sys.argv[7]
            output_risk_table = sys.argv[8]
            output_risico = sys.argv[9]
            output_risico_inundation = sys.argv[10]
        else:
            log.error("usage: <peilgebied> <input_rr_peilgebied> <input_rr_maaiveld> <input_ahn> <input_lgn>\
                      <conversion> <input_hymstat> <output_risk_table> <output_risico> <output_risico_inundation>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        log.info(" - read Conversion table")
        conv_list = [d for d in csv.DictReader(open(conversion))]

        expected_keys = ['LGN', 'K5', 'maxschade', 'sr1', 'sr2', 'sr3',
                         'sr4', 'sr5', 'sr6', 'sr7', 'sr8', 'sr9']
        for k in expected_keys:
            if k not in conv_list[0].keys():
                log.error('could not find key %s in conversion table' % k)
                sys.exit(2)

        schadefuncties = {}
        for item in conv_list:
            schadefuncties[int(item['LGN'])] = item
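        # schadefuncties now maps each LGN land-use code to its damage-function
        # row from the conversion table, e.g. (illustrative values only):
        #   schadefuncties[1] == {'LGN': '1', 'K5': '1', 'maxschade': '3000',
        #                         'sr1': '0', ..., 'sr9': '1'}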
        #----------------------------------------------------------------------------------------
        log.info(" - read hymstat table")
        csv_list = [d for d in csv.DictReader(open(input_hymstat))]
        expected_hymstat_keys = ['Location', 'Scale par. beta', 'Location par. x0']

        for k in expected_hymstat_keys:
            if k not in csv_list[0].keys():
                log.error('could not find key %s in hymstat table' % k)
                sys.exit(2)

        hymstat = {}
        for item in csv_list:
            hymstat[item[config.get('risico', 'hymstat_id')]] = item
        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)

        #<check geometry from input data, append to list if incorrect>

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data, append them to list if missing>
        check_fields = {}  # e.g.: {input_1: [fieldname1, fieldname2], input_2: [fieldname1, fieldname2]}
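        # For illustration only (hypothetical filling): the RR tables are read
        # further down with the configured gpgident as primary key, so that
        # field is a plausible minimum requirement here.
        #check_fields = {input_rr_peilgebied: [config.get('GENERAL', 'gpgident')],
        #                input_rr_maaiveld: [config.get('GENERAL', 'gpgident')]}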
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Environments
        log.info("Set environments")
        temp_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Select_analysis(peilgebied, temp_peilgebieden)

        cellsize = gp.describe(input_ahn).MeanCellHeight  # use same cell size as AHN
        gp.extent = gp.describe(temp_peilgebieden).extent  # use extent from Peilgebieden
        gpgident = config.get('GENERAL', 'gpgident')

        #----------------------------------------------------------------------------------------
        # create ahn ascii
        log.info("Create ascii from ahn")

        ahn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn, ahn_ascii)

        #----------------------------------------------------------------------------------------
        # create lgn ascii
        log.info("Create ascii from lgn")
        #determine the data type of the lgn input
        lgn_desc = gp.describe(input_lgn)
        if lgn_desc.DataType == 'ShapeFile' or lgn_desc.DataType == 'FeatureClass':
            lgn_fieldnames = nens.gp.get_table_def(gp, input_lgn)
            if "gridcode" in lgn_fieldnames:
                gridcode = "GRIDCODE"
            elif "grid_code" in lgn_fieldnames:
                gridcode = "grid_code"
            else:
                log.error("Cannot find 'grid_code' or 'gridcode' field in input lgn file")
                sys.exit(5)

            temp_lgn = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.FeatureToRaster_conversion(input_lgn, gridcode, temp_lgn, cellsize)
        elif lgn_desc.DataType == 'RasterDataset':
            temp_lgn = input_lgn
            if not lgn_desc.MeanCellHeight == cellsize:
                log.error("LGN cellsize (%s m) does not match AHN cellsize (%s m)" % (lgn_desc.MeanCellHeight, cellsize))
                sys.exit(5)
        else:
            log.error("cannot recognize datatype of LGN, must be a fc, shapefile or a raster dataset")
            sys.exit(5)

        lgn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("lgn ascii: %s" % lgn_ascii)
        gp.RasterToASCII_conversion(temp_lgn, lgn_ascii)

        #----------------------------------------------------------------------------------------
        log.info("Create ascii from surface level areas")
        if not turtlebase.arcgis.is_fieldname(gp, temp_peilgebieden, "ID_INT"):
            gp.AddField(temp_peilgebieden, "ID_INT", "LONG")

        id_int = 1
        idint_to_peilvakid = {}
        peilvakid_to_idint = {}
        if turtlebase.arcgis.is_fieldname(gp, temp_peilgebieden, gpgident):
            rows = gp.SearchCursor(temp_peilgebieden)
            for row in nens.gp.gp_iterator(rows):
                peilvakid = row.GetValue(gpgident)
                idint_to_peilvakid[id_int] = peilvakid
                peilvakid_to_idint[peilvakid] = id_int
                id_int = id_int + 1 #each row gets a new id_int

        log.info(" - calc value ID_INT")
        rows = gp.UpdateCursor(temp_peilgebieden)
        for row in nens.gp.gp_iterator(rows):
            gpg_ident = row.GetValue(gpgident)
            id_int = peilvakid_to_idint[gpg_ident]
            row.SetValue("ID_INT", id_int)
            rows.UpdateRow(row)
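        # temp_peilgebieden now carries an integer ID_INT per peilgebied;
        # idint_to_peilvakid / peilvakid_to_idint translate between the raster
        # cell values produced below and the original GPGIDENT strings.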

        log.info("Conversion feature peilgebieden to raster")
        InField = "ID_INT"
        temp_peilgebieden_raster = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.FeatureToRaster_conversion(temp_peilgebieden, InField, temp_peilgebieden_raster, cellsize)

        peilgeb_asc = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        gp.RasterToASCII_conversion(temp_peilgebieden_raster, peilgeb_asc)

        #----------------------------------------------------------------------------------------
        # Read input tables into dictionaries
        log.info("Read input tables")
        log.info(" - read RR_Peilgebied")
        rr_peilgebied = nens.gp.get_table(gp, input_rr_peilgebied, primary_key=gpgident.lower())
        log.info(" - read RR_Maaiveld")
        rr_maaiveld = nens.gp.get_table(gp, input_rr_maaiveld, primary_key=gpgident.lower())

        log.info(" - read conversion table between id_int and gpgident")
        gpg_conv = nens.gp.get_table(gp, temp_peilgebieden, primary_key='id_int')

        #----------------------------------------------------------------------------------------
        log.info("Calculate Risk")
        temp_risico = turtlebase.arcgis.get_random_file_name(workspace, "risk.asc")
        temp_risico_in = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        risico_tbl = turtlebase.risico.create_risk_grid(ahn_ascii, lgn_ascii,
                                                        peilgeb_asc, rr_peilgebied, rr_maaiveld,
                                                        hymstat, gpg_conv, schadefuncties, temp_risico,
                                                        temp_risico_in, cellsize)

        risk_result = turtlebase.risico.create_risico_dict(risico_tbl, schadefuncties, primary_key=gpgident)
        for k in risk_result.keys():
            risk_result[k]['SOURCE'] = "hymstat: %s, ahn: %s, lgn: %s" % (os.path.basename(input_hymstat),
                                                         os.path.basename(input_ahn),
                                                         os.path.basename(input_lgn))
            risk_result[k]['DATE_TIME'] = time.strftime("%d-%m-%Y, %H:%M:%S")

        gp.ASCIIToRaster_conversion(temp_risico, output_risico, "FLOAT")
        gp.ASCIIToRaster_conversion(temp_risico_in, output_risico_inundation, "FLOAT")

        # Write the results to a new table
        if not(gp.exists(output_risk_table)):
            log.info("creating table " + output_risk_table)
            gp.CreateTable(os.path.dirname(output_risk_table), os.path.basename(output_risk_table))

        risk_fields = nens.gp.get_table_def(gp, output_risk_table)
        fields_to_add = [{'fieldname': gpgident, 'fieldtype': 'text', 'length': 50},
                         {'fieldname': 'RIS_GW', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_ST', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_HL', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_AK', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_GR', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_GW_NT', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_ST', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_HL', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_AK', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_GR', 'fieldtype': 'Double'},
                         {'fieldname': 'RIS_IN_NT', 'fieldtype': 'Double'},
                         {'fieldname': 'SOURCE', 'fieldtype': 'text', 'length': 256},
                         {'fieldname': 'DATE_TIME', 'fieldtype': 'text', 'length': 25},
                         {'fieldname': 'COMMENTS', 'fieldtype': 'text', 'length': 256}]

        for field_to_add in fields_to_add:
            if field_to_add['fieldname'].lower() not in risk_fields:
                if 'length' in field_to_add:
                    gp.addfield_management(output_risk_table, field_to_add['fieldname'], field_to_add['fieldtype'], "#", "#", field_to_add['length'])
                else:
                    gp.addfield_management(output_risk_table, field_to_add['fieldname'], field_to_add['fieldtype'])

        turtlebase.arcgis.write_result_to_output(output_risk_table, gpgident, risk_result)
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        temp_files = os.listdir(workspace)
        for temp_file in temp_files:
            if temp_file.endswith('.asc') or temp_file.endswith('.prj'):
                try:
                    os.remove(os.path.join(workspace, temp_file))
                    log.debug("%s/%s removed" % (workspace, temp_file))
                except Exception, e:
                    log.debug(e)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #ernst calculation class
        ernst_drainage = ernst()

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        if len(sys.argv) == 7:
            file_input_peilgebieden_feature = sys.argv[1] #shape
            file_input_peilvakgegevens = sys.argv[2] #[ZOMERPEIL],[WINTERPEIL]
            file_input_kwelstroom = sys.argv[3] #[KWELSTROOM]
            file_input_maaiveldkarakteristiek = sys.argv[4] #[MV_HGT_50]
            file_input_bodemsoort = sys.argv[5] #shape
            file_output = sys.argv[6]
        else:
            log.error("Usage: python rural_drainageparameter.py <peilgebieden shape> <peilvakgegevens> <kwelstroom> <maaiveldkarakteristiek> <bodemsoort shape> <outputtabel HydroBase>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check geometry
        log.info("Check geometry of input parameters")
        if not turtlebase.arcgis.is_file_of_type(gp, file_input_peilgebieden_feature, 'Polygon'):
            log.error("Input %s does not contain polygons" % file_input_peilgebieden_feature)
            sys.exit(1)
        if not turtlebase.arcgis.is_file_of_type(gp, file_input_bodemsoort, 'Polygon'):
            log.error("Input %s does not contain polygons" % file_input_bodemsoort)
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check required fields
        log.info("Check required fields in input data")
        peilgebied_id = config.get('GENERAL', 'gpgident')
        pawn_code = config.get('Ernst', 'input_bodemsoort_code')

        missing_fields = []
        check_fields = {file_input_peilgebieden_feature: [peilgebied_id],
                        file_input_peilvakgegevens: [peilgebied_id,
                                                     config.get('Ernst', 'peilvakgegevens_zomerpeil'),
                                                     config.get('Ernst', 'peilvakgegevens_winterpeil')],
                        file_input_kwelstroom: [peilgebied_id,
                                                config.get('Ernst', 'kwelstroom_kwelstroom')],
                        file_input_maaiveldkarakteristiek: [peilgebied_id,
                                                            config.get('Ernst', 'maaiveldkarakteristiek_value')],
                        file_input_bodemsoort: [pawn_code]}

        for input_file, fields in check_fields.items():
            for field in fields:
                if not turtlebase.arcgis.is_fieldname(gp, input_file, field):
                    log.error("Missing field %s in %s" % (field, input_file))
                    missing_fields.append("missing %s in %s" % (field, input_file))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check record count
        log.info("Check records of input parameters")
        count_area = turtlebase.arcgis.fc_records(gp, file_input_peilgebieden_feature)
        count_surface_level_table = turtlebase.arcgis.fc_records(gp, file_input_peilvakgegevens)
        count_seepage = turtlebase.arcgis.fc_records(gp, file_input_kwelstroom)
        count_scurve = turtlebase.arcgis.fc_records(gp, file_input_maaiveldkarakteristiek)

        if count_surface_level_table != count_area:
            log.error("input %s (%s records) contains not the same records as %s (%s records)" % (file_input_peilvakgegevens, count_surface_level_table,
                                                                                                 file_input_peilgebieden_feature, count_area))
            sys.exit(2)
        if count_seepage != count_area:
            log.error("input %s (%s records) contains not the same records as %s (%s records)" % (file_input_kwelstroom, count_seepage,
                                                                                                 file_input_peilgebieden_feature, count_area))
            sys.exit(2)
        if count_scurve != count_area:
            log.error("input %s (%s records) contains not the same records as %s (%s records)" % (file_input_maaiveldkarakteristiek,
                                                                                                 count_scurve, file_input_peilgebieden_feature, count_area))
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        #A: bodemsoort
        log.info("A-1) Copy peilgebieden to temporary workspace")
        temp_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(file_input_peilgebieden_feature, temp_peilgebieden)

        log.info("A-2) Copy bodemsoort to temporary workspace")
        temp_bodemsoort = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(file_input_bodemsoort, temp_bodemsoort)

        log.info("A-3) Intersect bodemsoort + peilgebieden -> peilg+bodem")
        temp_intersect_bodem_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Intersect_analysis(temp_peilgebieden + "; " + temp_bodemsoort, temp_intersect_bodem_peilgebieden)

        log.info("A-4) Dissolve peilg+bodem")
        temp_dissolve_bodem_peilgebieden = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Dissolve_management(temp_intersect_bodem_peilgebieden, temp_dissolve_bodem_peilgebieden, peilgebied_id + " ;" + pawn_code, "")

        log.info("A-5) Read peilg+bodem(dissolve)")
        log.info(" - reading shape")
        peilv_grondsoort = {}
        row = gp.SearchCursor(temp_dissolve_bodem_peilgebieden)
        for item in nens.gp.gp_iterator(row):
            area_id = item.GetValue(peilgebied_id)
            soil_id = item.GetValue(pawn_code)
            area = item.Shape.Area
            data_row = {'pawn_code': soil_id, 'area': area}
            if not(peilv_grondsoort.has_key(area_id)):
                peilv_grondsoort[area_id] = {'grondsoort':[]}
            peilv_grondsoort[area_id]['grondsoort'].append(data_row)

        log.info(" - sorting")
        for key in peilv_grondsoort.keys():
            peilv_grondsoort[key]['grondsoort'].sort(sort_area_rev)
            peilv_grondsoort[key]['area'] = sum_grondsoort(peilv_grondsoort[key]['grondsoort'])
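        # peilv_grondsoort now maps each area id to its soil types:
        # {area_id: {'grondsoort': [{'pawn_code': ..., 'area': ...}, ...],
        #            'area': <total area>}}, with the soil rows presumably
        # sorted so the dominant (largest-area) soil type comes first.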

        # ---------------------------------------------------------------------------
        #B: ernst parameters

        #read input: [ZOMERPEIL, WINTERPEIL, KWELSTROOM, MV_HGT_50]
        log.info("B-1) Reading inputfile peilvakgegevens")
        data_set = {}

        row = gp.SearchCursor(file_input_peilvakgegevens)
        for item in nens.gp.gp_iterator(row):
            field_id = item.GetValue(peilgebied_id)
            data_set[field_id] = {}
            data_set[field_id]['zomerpeil'] = item.GetValue(config.get('Ernst', 'peilvakgegevens_zomerpeil'))
            data_set[field_id]['winterpeil'] = item.GetValue(config.get('Ernst', 'peilvakgegevens_winterpeil'))

            if (data_set[field_id]['zomerpeil'] < float(config.get('Ernst', 'validate_min_zomerpeil'))) or (data_set[field_id]['zomerpeil'] > float(config.get('Ernst', 'validate_max_zomerpeil'))):
                log.error("zomerpeil has a non-valid value of " + str(data_set[field_id]['zomerpeil']))
                sys.exit(5)
            if (data_set[field_id]['winterpeil'] < float(config.get('Ernst', 'validate_min_winterpeil'))) or (data_set[field_id]['winterpeil'] > float(config.get('Ernst', 'validate_max_winterpeil'))):
                log.error("winterpeil has a non-valid value of " + str(data_set[field_id]['winterpeil']))
                sys.exit(5)

        #read input: [ZOMERPEIL, WINTERPEIL, KWELSTROOM, MV_HGT_50]
        log.info("B-2) Reading inputfile kwelstroom")
        row = gp.SearchCursor(file_input_kwelstroom)
        for item in nens.gp.gp_iterator(row):
            field_id = item.GetValue(peilgebied_id)
            if not(data_set.has_key(field_id)):
                log.error("non-matching kwelstroom and peilvakgegevens, check if peilvakgegevens has key '" + field_id + "'")
                sys.exit(9)
            data_set[field_id]['kwel'] = item.GetValue(config.get('Ernst', 'kwelstroom_kwelstroom'))

        #read input: [ZOMERPEIL, WINTERPEIL, KWELSTROOM, MV_HGT_50]
        log.info("B-3) Reading inputfile maaiveldkarakteristiek")
        row = gp.SearchCursor(file_input_maaiveldkarakteristiek)
        for item in nens.gp.gp_iterator(row):
            field_id = item.GetValue(peilgebied_id)
            if not(data_set.has_key(field_id)):
                log.error("non-matching maaiveldkarakteristiek and peilvakgegevens, check if peilvakgegevens has key '" + field_id + "'")
                sys.exit(9)
            data_set[field_id]['maaiveld'] = item.GetValue(config.get('Ernst', 'maaiveldkarakteristiek_value'))

        # ---------------------------------------------------------------------------
        #check input: each record should contain all fields (count: 4)
        log.info("B-4) Checking input")
        for key, value in data_set.items():
            if len(value.items()) != 4:
                log.error("%s: %s" % (key, value))
                log.error("check if inputfiles match with eachother!")
                sys.exit(6)

        # ---------------------------------------------------------------------------
        #determine drooglegging: [DL] = [MV_HGT_50] - max([WINTERPEIL], [ZOMERPEIL])
        #determine drainage resistance [ALFA_LZ] = xx * [DL] - yy, where xx and yy depend on the class
        #determine INF_OPWAT, OPP_AFVOER
        log.info("B-6) preparing data for output")
        data_set_output = {}
        import time
        date_str = time.strftime("%d %B %Y %H:%M:%S")
        log.info("Calculating GRONDSOORT, drooglegging, ALFA_LZ, INF_OPWAT, OPP_AFVOER... ")
        log.info(" - Date string: " + date_str)
        for key, item in data_set.items():
            #print key, item
            data_set[key]['drooglegging'] = ernst_drainage.calc_dl(item['maaiveld'], item['zomerpeil'], item['winterpeil'])
            data_set_output[key] = {}
            data_set_output[key][peilgebied_id] = key #important!
            data_set_output[key][config.get('Ernst', 'output_alfa_lz')] = ernst_drainage.calc_alfa(data_set[key]['kwel'], data_set[key]['drooglegging'])
            data_set_output[key][config.get('Ernst', 'output_inf_opwat')] = 500 #or data_set[key]['ALFA_LZ'] * 1.5
            data_set_output[key][config.get('Ernst', 'output_opp_afvoer')] = 0.5
            grondsrt_str = ""
            try:
                data_set_output[key][config.get('Ernst', 'output_grondsoort')] = peilv_grondsoort[key]['grondsoort'][0]['pawn_code']
                for idx in range(min(len(peilv_grondsoort[key]['grondsoort']), 5)):
                    grondsrt_str = grondsrt_str + str(peilv_grondsoort[key]['grondsoort'][idx]['pawn_code']) + "(" + str(int(100 * peilv_grondsoort[key]['grondsoort'][idx]['area'] / peilv_grondsoort[key]['area'])) + "%) "
            except Exception, e:
                log.warning(e)
                log.warning("id " + key + " has no " + config.get('Ernst', 'output_grondsoort') + " value!")
                data_set_output[key][config.get('Ernst', 'output_grondsoort')] = -1
            source_str = "grondsrt:" + grondsrt_str + "pv:" + os.path.basename(file_input_peilvakgegevens) + " kwel:" + os.path.basename(file_input_kwelstroom) + " mv:" + os.path.basename(file_input_maaiveldkarakteristiek)
            if len(source_str) > 50:
                source_str = source_str[:50]
            data_set_output[key]['SOURCE'] = source_str
            data_set_output[key]['DATE_TIME'] = date_str

        # ---------------------------------------------------------------------------
        #C: output
        #add cols [ALFA_LZ], [INF_OPWAT], [OPP_AFVOER]
        drainageFields = {peilgebied_id: {'type': 'TEXT', 'length': '30'},
                          config.get('Ernst', 'output_alfa_lz'):{'type': 'DOUBLE'},
                          config.get('Ernst', 'output_inf_opwat'):{'type': 'DOUBLE'},
                          config.get('Ernst', 'output_opp_afvoer'):{'type': 'DOUBLE'},
                          config.get('Ernst', 'output_grondsoort'):{'type': 'INTEGER'},
                          'SOURCE':{'type': 'TEXT', 'length': '256'},
                          'DATE_TIME':{'type': 'TEXT', 'length': '40'},
                          'COMMENTS':{'type': 'TEXT', 'length': '256'}}

        #check if the output table exists; if not, create it
        log.info("C-1) Checking output table... ")
        if not(gp.exists(file_output)):
            gp.CreateTable(os.path.dirname(file_output), os.path.basename(file_output))

        #check if the output table has the required fields
        log.info("C-2) Checking fields... ")
        for field_name, field_settings in drainageFields.items():
            if field_settings.has_key('length'):
                if not turtlebase.arcgis.is_fieldname(gp, file_output, field_name):
                    gp.AddField(file_output, field_name, field_settings['type'], '#', '#', field_settings['length'])
            else:
                if not turtlebase.arcgis.is_fieldname(gp, file_output, field_name):
                    gp.AddField(file_output, field_name, field_settings['type'])

        # ---------------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(file_output, peilgebied_id, data_set_output)
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()
        log.info("workspace: %s" % workspace)

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #------------------------------------------------
        log.info("Reading and checking input")
        rekenpunten = sys.argv[1]
        waterlijnen = sys.argv[2]
        peilgebieden = sys.argv[3] #optional
        output_bergingstakken = sys.argv[4]
        gpgident = config.get('GENERAL', 'gpgident')
        if turtlebase.arcgis.is_fieldname(gp, peilgebieden, gpgident):
            peilgebieden_list = nens.gp.get_table(gp, peilgebieden, primary_key=gpgident.lower())
        else:
            log.error("field %s is missing in %s", gpgident, peilgebieden)
            sys.exit(1)

        if not turtlebase.arcgis.is_fieldname(gp, rekenpunten, gpgident):
            log.error("field %s is missing in %s", gpgident, rekenpunten)
            sys.exit(1)

        log.info("Check whether the given file names are ArcGIS compatible")

        for argv in sys.argv[1:]:
            turtlebase.filenames.check_filename(argv)

        #read the x and y coordinates of the calculation points
        log.info("Reading calculation points")

        rekenpunten_x_y_coordinaten = bepalen_x_y_coordinaat(gp, rekenpunten, gpgident)
        log.info("Copy " + waterlijnen + " to the workspace")
        waterlijnen_lokaal = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        log.debug("Copy the water lines to a local directory")
        gp.select_analysis(waterlijnen, waterlijnen_lokaal)
        log.info("Calculate end points of potential storage branches around the calculation points")
        dict_stars = create_dict_stars_around_rekenpunten(peilgebieden_list, config, rekenpunten_x_y_coordinaten)
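        # dict_stars presumably holds, per calculation point, the candidate end
        # points laid out in a star around that point; joined with the x/y of
        # the calculation points themselves it provides a start and end for
        # every candidate storage-branch line created below.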

        joined_dictionaries = join_dictionaries(dict_stars, rekenpunten_x_y_coordinaten)
        star = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        log.info("Create potential storage branches from each calculation point")

        createLineFromPoints(gp, joined_dictionaries, 'gpgident', star)
        intersect = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        log.info("Calculate crossings of potential storage branches with water lines")
        #Buffer_analysis (in_features, out_feature_class, buffer_distance_or_field, line_side, line_end_type, dissolve_option, dissolve_field)

        gp.Intersect_analysis(star + ";" + waterlijnen_lokaal, intersect, "#", "#", "POINT")
        intersect_x_y_coordinaten = bepalen_x_y_coordinaat(gp, intersect, gpgident)

        remainingpoints_to_be_removed_from_star = remove_duplicate_values_from_dictionaries(rekenpunten_x_y_coordinaten, intersect_x_y_coordinaten)

        #now compare the keys in the remainingpoints_to_be_removed_from_star dictionary with the ids in star and remove those records

        log.info("Determine the remaining end points of the storage branches")
        remove_records_from_shapefile_based_on_keys_in_dict(gp, star, gpgident, remainingpoints_to_be_removed_from_star)

        star_punten = turtlebase.arcgis.get_random_file_name(workspace_gdb)

        #now the coordinates are read from the star_punten shape (lines)
        log.info("Calculate the ideal storage branch")
        create_points_from_dict(gp, dict_stars, star_punten, gpgident)

        intersect2 = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Intersect_analysis(star_punten + ";" + star, intersect2, "#", "#", "POINT")
        log.info("Calculate distance from potential storage branches to the water line")
        log.debug("First a buffer is created")

        buffer_star = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Buffer_analysis(rekenpunten, buffer_star, int(config.get('bergingstakken', 'length_of_breach')))
        snijpunt_waterlijn = turtlebase.arcgis.get_random_file_name(workspace_gdb)

        log.debug("Now intersect the buffer with the water lines. These points are used to calculate the distance to the water line")
        gp.Intersect_analysis(buffer_star + ";" + waterlijnen_lokaal, snijpunt_waterlijn, "#", "#", "POINT")

        log.debug("Now write the coordinates of the intersections with the water line to a dictionary")
        snijpunten_waterlijn_dict = bepalen_x_y_coordinaat_meerdere_punten(gp, snijpunt_waterlijn, gpgident)

        log.debug("Now write the coordinates of the remaining star points to a dictionary")
        punten_star_dict = bepalen_x_y_coordinaat_meerdere_punten(gp, intersect2, gpgident)

        log.debug("There are 2 or more points on the water line to which each star point has a distance")
        log.debug("Calculate which one gives the minimal distance between a star point and the water line")
        #now calculate the distance between snijpunten_waterlijn_dict and intersect2 with the distance calculator from the previous script
        minimaldistance_dict_star_points = calculate_distance_between_points(snijpunten_waterlijn_dict, punten_star_dict)
        log.info("Determine which point of the storage branch lies furthest from the water line")
        list_with_ideal_points = bepaal_ideale_punt_bergingstak(minimaldistance_dict_star_points)
        out_data = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Copy_management(star, out_data)

        log.info("Select the storage branches that are perpendicular to the water line")
        remove_records_from_shapefile_not_in_list(gp, star, gpgident, list_with_ideal_points)
        #link the lines to the RR_oppervlak table and take over the openwat_HA values
        log.debug("The gpgident is retrieved again from the unique peilgebied id")
        clean_up_star(gp, star, gpgident)
        #intersect star with itself; if anything remains, give a warning with the peilgebied id concerned,
        #so the user knows to adjust it there manually.
        log.info("Create output shape with storage branches")
        log.info('%s star' % star)
        log.info('%s output_bergingstakken' % output_bergingstakken)
        gp.select_analysis(star, output_bergingstakken)

        log.info("Check whether any storage branches overlap")
        try:
            intersect3 = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Intersect_analysis(output_bergingstakken, intersect3, "#", "#", "POINT")
            #loop through the intersect output and report the GPGident if it is present in the attribute table
            row = gp.SearchCursor(intersect3)
            for item in nens.gp.gp_iterator(row):
                gpg_ident = item.getValue(gpgident)
                log.warning("In peilgebied " + str(gpg_ident) + " the storage branch overlaps another storage branch. Adjust this manually!")
        except (RuntimeError, TypeError, NameError):
            log.info('No overlap present')
            
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        Input required for this tool:
        """
        if len(sys.argv) == 6:
            log.info("Reading input parameters")
            mpoint = sys.argv[1]
            hydroline = sys.argv[2]
            output_xyz = sys.argv[3]
            output_yz = sys.argv[4]
            output_locations = sys.argv[5]
        else:
            log.warning("usage: <mpoint> <hydroline> <output_xyz> <output_yz> <output_locations>")
            sys.exit(1)

        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)

        #"<check geometry from input data, append to list if incorrect>"

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        ovkident = 'ovkident'
        proident = 'proident'
        zcoord = 'ZH'
        # Check required fields in input data
        log.info("Check required fields in input data")

        missing_fields = []

        #<check required fields from input data,
        #        append them to list if missing>
        check_fields = {hydroline: [ovkident],
                        mpoint: [proident, zcoord]}
        #check_fields = {input_1: [fieldname1, fieldname2],
        #                 input_2: [fieldname1, fieldname2]}
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        multipoints = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        log.info("Dissolving pointcloud to multipoint")
        gp.Dissolve_management(mpoint, multipoints, proident)

        if output_locations == '#':
            output_locations = (
                turtlebase.arcgis.get_random_file_name(workspace_gdb))
        log.info("Calculating coordinates of centerpoints")
        create_centroids(gp, multipoints, output_locations, proident)

        centerpoints_sj = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        log.info("Calculating adjacent hydrolines")
        gp.SpatialJoin_analysis(output_locations, hydroline, centerpoints_sj,
                                'JOIN_ONE_TO_ONE', "#", "#", "CLOSEST", 100)
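        # JOIN_ONE_TO_ONE with CLOSEST and a search radius of 100 (map units):
        # each centre point receives the attributes of the nearest hydroline,
        # so every profile can be tied to the watercourse it belongs to.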

        log.info("Reading center points")
        centerpoints_d = nens.gp.get_table(gp, centerpoints_sj,
                                           primary_key=proident)

        log.info("Reading hydrolines")
        lineparts = get_line_parts(gp, hydroline, ovkident)
        log.info("Reading pointcloud")
        pointcloud = get_pointcloud(gp, mpoint, proident, zcoord)

        log.info("Sorting profiles")
        profiles_xyz, profiles_yz = sort_pointcloud(gp, centerpoints_d,
                                                    lineparts, pointcloud)
        log.info("Write xyz points to output")
        write_profiles_xyz(gp, profiles_xyz, output_xyz)
        log.info("Write yz information to output table")
        write_profiles_yz(gp, profiles_yz, output_yz)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(multipoints)
            gp.delete(centerpoints_sj)
            

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()

    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        # Input parameters
        if len(sys.argv) == 5:
            # input parameter
            input_external_weir = sys.argv[1]
            input_voronoi_polygon = sys.argv[2]
            input_rrcf_waterlevel = sys.argv[3]
            # output parameters
            output_table_external_weir = sys.argv[4]
        else:
            log.error("usage: <input_external_weir> <input_voronoi_polygon> <input_rrcf_waterlevel> <output_table_external_weir>")
            sys.exit(1)

        temp_voronoi = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.select_analysis(input_voronoi_polygon, temp_voronoi)
        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        log.debug(" - check input_external_weir: %s" % input_external_weir)
        if gp.describe(input_external_weir).ShapeType != 'Point':
            log.error("Input_external_weir is not a point feature class!")
            geometry_check_list.append(input_external_weir + " -> (Point)")

        log.debug(" - check voronoi polygon: %s" % temp_voronoi)
        if gp.describe(temp_voronoi).ShapeType != 'Polygon':
            log.error("Input voronoi is not a polygon feature class!")
            geometry_check_list.append(temp_voronoi + " -> (Polygon)")

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check required fields in database
        log.info("Check required fields in input data")

        missing_fields = []
        if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, config.get('toetsing_overstorten', 'calculation_point_ident')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'calculation_point_ident'), temp_voronoi))
            missing_fields.append("%s: %s" % (temp_voronoi, config.get('toetsing_overstorten', 'calculation_point_ident')))

        if not turtlebase.arcgis.is_fieldname(gp, input_rrcf_waterlevel, config.get('toetsing_overstorten', 'field_waterstand')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'field_waterstand'), input_rrcf_waterlevel))
            missing_fields.append("%s: %s" % (input_rrcf_waterlevel, config.get('toetsing_overstorten', 'field_waterstand')))

        if not turtlebase.arcgis.is_fieldname(gp, input_external_weir, config.get('toetsing_overstorten', 'overstort_ident')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'overstort_ident'), input_external_weir))
            missing_fields.append("%s: %s" % (input_external_weir, config.get('toetsing_overstorten', 'overstort_ident')))

        if not turtlebase.arcgis.is_fieldname(gp, input_external_weir, config.get('toetsing_overstorten', 'drempelhoogte')):
            log.debug(" - missing: %s in %s" % (config.get('toetsing_overstorten', 'drempelhoogte'), input_external_weir))
            missing_fields.append("%s: %s" % (input_external_weir, config.get('toetsing_overstorten', 'drempelhoogte')))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # read waterlevel table as a dictionary
        log.info("Read waterlevel table")
        waterlevel_dict = nens.gp.get_table(gp, input_rrcf_waterlevel, primary_key=config.get('toetsing_overstorten', 'calculation_point_ident').lower())
        log.debug(waterlevel_dict)
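        # waterlevel_dict maps each calculation-point ident to its row from the
        # RRCF waterlevel table; the configured waterstand value is copied onto
        # the matching voronoi polygon below.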

        # Add fields to output
        if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, config.get('toetsing_overstorten', 'field_waterstand')):
            log.info(" - add field %s" % config.get('toetsing_overstorten', 'field_waterstand'))
            gp.addfield(temp_voronoi, "%s" % config.get('toetsing_overstorten', 'field_waterstand'), "double")

        
        # copy waterlevel to voronoi polygons
        field_config_waterstand = config.get('toetsing_overstorten', 'field_waterstand').lower()
        field_calculation_point_ident = config.get('toetsing_overstorten', 'calculation_point_ident')
        
        rows = gp.UpdateCursor(temp_voronoi)
        for row in nens.gp.gp_iterator(rows):
            row_id = row.GetValue(field_calculation_point_ident)
            
            if waterlevel_dict.has_key(row_id):
                log.debug(waterlevel_dict[row_id])
                row.SetValue(field_config_waterstand, waterlevel_dict[row_id][field_config_waterstand])

                rows.UpdateRow(row)

        #----------------------------------------------------------------------------------------
        # Join external weirs to voronoi using spatial location (spatial join)
        log.info("join waterlevel to external weirs using a spatial location")
        temp_spatial_join = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        #gp.SpatialJoin_analysis(input_external_weir, temp_voronoi, temp_spatial_join, "JOIN_ONE_TO_ONE", "#", "#", "INTERSECTS")
        gp.Intersect_Analysis(input_external_weir + ';' + temp_voronoi, temp_spatial_join)
        
        external_weir_dict = nens.gp.get_table(gp, temp_spatial_join, primary_key=config.get('toetsing_overstorten', 'overstort_ident').lower())

        result_dict = {}
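        # Result codes written per external weir: 1 = waterlevel exceeds the
        # weir height, 0 = it does not, 9 = waterlevel or weir height missing
        # (both then reported as -999).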
        for k, v in external_weir_dict.items():
            waterlevel = v[config.get('toetsing_overstorten', 'field_waterstand').lower()]
            weir_height = v[config.get('toetsing_overstorten', 'drempelhoogte').lower()]
            if waterlevel is None or weir_height is None:
                waterlevel = -999
                weir_height = -999
                result_value = 9
            else:
                if float(waterlevel) > float(weir_height):
                    result_value = 1
                else:
                    result_value = 0

            result_dict[k] = {config.get('toetsing_overstorten', 'overstort_ident'): k,
                              config.get('toetsing_overstorten', 'field_waterstand'): waterlevel,
                              config.get('toetsing_overstorten', 'drempelhoogte'): weir_height,
                              config.get('toetsing_overstorten', 'field_toetsing_overlast_stedelijk'): result_value}
        #----------------------------------------------------------------------------------------
        # Create output table
        if not gp.exists(output_table_external_weir):
            log.info("Create new output table")
            temp_result_table = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.CreateTable_management(os.path.dirname(temp_result_table), os.path.basename(temp_result_table))
            copy_table = True
        else:
            temp_result_table = output_table_external_weir
            copy_table = False

        fields_to_add = [config.get('toetsing_overstorten', 'field_waterstand'),
                         config.get('toetsing_overstorten', 'drempelhoogte'),
                         config.get('toetsing_overstorten', 'field_toetsing_overlast_stedelijk')]

        if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, config.get('toetsing_overstorten', 'overstort_ident')):
            log.debug(" - add field %s to %s" % (config.get('toetsing_overstorten', 'overstort_ident'), temp_result_table))
            gp.addfield_management(temp_result_table, config.get('toetsing_overstorten', 'overstort_ident'), 'text')

        for field in fields_to_add:
            if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, field):
                log.debug(" - add field %s to %s" % (field, temp_result_table))
                gp.addfield_management(temp_result_table, field, 'double')

        #----------------------------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(temp_result_table, config.get('toetsing_overstorten', 'overstort_ident').lower(), result_dict)

        if copy_table:
            gp.TableToTable_conversion(temp_result_table, os.path.dirname(output_table_external_weir), os.path.basename(output_table_external_weir))

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
Example #7
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        Input required for this tool:
        """
        if len(sys.argv) == 4:
            user_input = sys.argv[1]
            flip_field = sys.argv[2].lower()
            output_shape = sys.argv[3]
        else:
            log.warning("usage: <user_input> <flip_field> <output_shape>")
            sys.exit(1)

        tempfiles = []
        input_shape = turtlebase.arcgis.get_random_file_name(workspace, '.shp')
        gp.Select_analysis(user_input, input_shape)
        #---------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        #log.debug(" - check <input >: %s" % argument1)
        if not turtlebase.arcgis.is_file_of_type(gp, input_shape, 'Polyline'):
            log.error("%s is not a %s feature class!" % (input_shape, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (input_shape, 'Polyline'))

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Check required fields in input data
        ovk_field = config.get('GENERAL', 'ovkident').lower()
        missing_fields = []
        check_fields = {input_shape: ['Sum_OPP_LA', 'Sum_OPP_ST',
                        ovk_field, 'from_x', 'from_y', 'to_x', 'to_y', flip_field]}

        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(
                        gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (
                                    fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        #create output:
        fields_to_add = [(ovk_field, 'TEXT'),
                         ('incoming', 'SHORT'),
                         ('examined', 'SHORT'),
                         ('terminal', 'SHORT'),
                         ('som_sted', 'DOUBLE'),
                         ('som_land', 'DOUBLE'),
                         ('som_totaal', 'DOUBLE'),
                         ('bottleneck', 'SHORT'),
                         (flip_field, 'SHORT')]
        gp.select_analysis(input_shape, output_shape)

        new_feat = {}
        new_geometry = {}
        log.info("Inlezen geometrie en omdraaien van de geometrie")

        fieldnames_dict = nens.gp.get_table_def(gp, input_shape)
        log.debug(fieldnames_dict)
        desc = gp.describe(input_shape)
        count = 0
        rows = gp.SearchCursor(input_shape)
        row = rows.Next()
        while row:

            flip_boolean = row.getValue(flip_field)

            if flip_boolean == 1:
                count += 1
                #read features
                feat = row.getValue(desc.ShapeFieldName)
                ovkident = row.getValue(ovk_field)
                new_feat = flip_geometry(gp, feat, ovkident, new_feat)
                ##new_feat = feat

                #store geometry information in dictionary
                if ovkident not in new_geometry:
                    new_geometry[ovkident] = {}
                #store all information from the attribute table
                for column in fields_to_add:
                    column = column[0]

                    # the from/to columns for x and y need to be switched as well
                    if column == 'from_x':
                        lookup_column = 'to_x'
                    elif column == 'from_y':
                        lookup_column = 'to_y'
                    elif column == 'to_y':
                        lookup_column = 'from_y'
                    elif column == 'to_x':
                        lookup_column = 'from_x'
                    else:
                        # no switch needed
                        lookup_column = column

                    if column != 'opm':
                        if lookup_column in fieldnames_dict:
                            update_value = row.getValue(lookup_column)
                            try:
                                new_geometry[ovkident][column] = float(update_value)
                            except (TypeError, ValueError):
                                # value is not numeric, store it unchanged
                                log.debug("geen float")
                                new_geometry[ovkident][column] = update_value
                            log.debug(new_geometry[ovkident][column])
                    # remark column: record in the dictionary that the line was reversed
                    if column == 'opm':
                        new_geometry[ovkident][column] = "Lijn is omgedraaid"
                log.info("Opslaan van waterlijn: " + str(ovkident))
            row = rows.Next()
        del row, rows
        #remove the lines that are going to be flipped

        removed_lines = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        # the temp file only needs to be created when lines are actually flipped.
        gp.select_analysis(input_shape, removed_lines)

        # first remove the lines that would be duplicated in the end result; the lines
        # are removed from a copy of the input file.
        row = gp.UpdateCursor(removed_lines)
        log.info("Verwijder dubbele rijen")
        for item in nens.gp.gp_iterator(row):
            if item.getValue(flip_field) == 1:
                row.DeleteRow(item)

        temp_shape = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        tempfiles.append(temp_shape)

        #creates new lines in workspace with same name as output_shape
        count = create_line_from_dict(gp, workspace_gdb, new_feat, fields_to_add, new_geometry, temp_shape)
        
        if count == 0:
            log.warning("Er zijn geen lijnen omgedraaid")
            log.warning("Door de gebruiker is in de kolom " + str(flip_field) + " geen 1 ingevuld")
        else:
            tempfiles.append(removed_lines)

        #merge new lines with output
        gp.Merge_management(temp_shape + ";" + removed_lines, output_shape)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            turtlebase.arcgis.remove_tempfiles(gp, log, tempfiles)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
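A note on the flip step above: the if/elif chain that swaps the from_x/to_x and from_y/to_y
columns can also be expressed as a small lookup table. A minimal sketch, not part of the tool
(swap_map and lookup_column_for are illustrative names):

# illustrative sketch only, not part of the original tool
swap_map = {'from_x': 'to_x', 'to_x': 'from_x',
            'from_y': 'to_y', 'to_y': 'from_y'}

def lookup_column_for(column):
    """Return the column to read from when a line is reversed."""
    return swap_map.get(column, column)

assert lookup_column_for('from_x') == 'to_x'
assert lookup_column_for('som_sted') == 'som_sted'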
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)
        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        if len(sys.argv) == 5:
            input_polygon_fc = sys.argv[1]  # level areas within which the drainage areas are searched
            input_channel_fc = sys.argv[2]  # line segments for which the nearest area must be found
            output_afvoervlakken_shp = sys.argv[3]  # shapefile with the created drainage areas per line segment
            use_intersect_channel = sys.argv[4]  # boolean: split up the channel, yes or no

        else:
            log.error("Usage: python rural_genereren_afvoervlakken.py <peilgebieden shape> <waterlijnen shape> <output shape>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check geometry input parameters
        log.info("Check geometry of input parameters")
        geometry_check_list = []

        if not turtlebase.arcgis.is_file_of_type(gp, input_polygon_fc, 'Polygon'):
            log.error("%s is not a %s feature class!" % (input_polygon_fc, 'Polygon'))
            geometry_check_list.append("%s -> (%s)" % (input_polygon_fc, 'Polygon'))

        if not turtlebase.arcgis.is_file_of_type(gp, input_channel_fc, 'Polyline'):
            log.error("%s is not a %s feature class!" % (input_channel_fc, 'Polyline'))
            geometry_check_list.append("%s -> (%s)" % (input_channel_fc, 'Polyline'))

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        # Check required fields
        log.info("check required fields in input data")
        missing_fields = []
        ovk_field = config.get('afvoervlakken', 'input_channel_ident')
        gpg_field = config.get('GENERAL', 'gpgident')

        if not turtlebase.arcgis.is_fieldname(gp, input_polygon_fc, gpg_field):
            log.error("missing field '%s' in %s" % (gpg_field, input_polygon_fc))
            missing_fields.append("%s: %s" % (input_polygon_fc, gpg_field))

        if not turtlebase.arcgis.is_fieldname(gp, input_channel_fc, ovk_field):
            log.error("missing field '%s' in %s" % (ovk_field, input_channel_fc))
            missing_fields.append("%s: %s" % (input_channel_fc, ovk_field))

        if len(missing_fields) > 0:
            log.error("missing fields: %s" % missing_fields)
            sys.exit(2)

        #----------------------------------------------------------------------------------------
        polygon_dict = nens.gp.get_table(gp, input_polygon_fc, primary_key=gpg_field.lower())

        #extract channels within polygon
        intersect_waterlijn = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Intersect_analysis([input_polygon_fc, input_channel_fc], intersect_waterlijn)

        polygon_list = []
        if not os.path.isdir(os.path.join(workspace, "voronoi_work")):
            os.makedirs(os.path.join(workspace, "voronoi_work"))
        counter = 0
        for k in polygon_dict.keys():
            counter += 1
            log.info("extract polygon %s" % k)

            huidig_peilgebied_lyr = "gpg_%s" % counter
            gp.MakeFeatureLayer(input_polygon_fc, huidig_peilgebied_lyr, "%s = '%s'" % (gpg_field, k))

            log.debug("extract polylines within %s" % k)

            huidige_waterlijn_lyr = "ovk_%s" % counter
            gp.MakeFeatureLayer(intersect_waterlijn, huidige_waterlijn_lyr, "%s = '%s'" % (gpg_field, k))

            #count records
            record_count = turtlebase.arcgis.fc_records(gp, huidige_waterlijn_lyr)
            log.debug(" - record count: %s" % record_count)

            if record_count > 1:
                log.info(" - create voronoi polygons")
                point_selection = turtlebase.voronoi.create_points(gp, huidige_waterlijn_lyr, ovk_field)

                log.info(" - create line_voronoi")
                result_dict = turtlebase.voronoi.create_line_voronoi(point_selection)

                log.info(" - create polygons")
                polygon_fc = turtlebase.voronoi.create_merged_polygons(result_dict, workspace_gdb)

                log.info(" - intersect line_voronoi polygons")
                output_intersect_fc = os.path.join(workspace, "voronoi_work", "voronoi_%s.shp" % counter)

                gp.Intersect_analysis(huidig_peilgebied_lyr + ";" + polygon_fc, output_intersect_fc)

                polygon_list.append(output_intersect_fc)

            elif record_count == 1:
                log.debug(" - 1 watergang in peilgebied, opknippen dus niet nodig, kopieer gpg")
                output_spatial_join = os.path.join(workspace, "voronoi_work", "out_sj_%s.shp" % counter)

                gp.SpatialJoin_analysis(huidig_peilgebied_lyr, huidige_waterlijn_lyr, output_spatial_join)
                polygon_list.append(output_spatial_join)
            else:
                log.warning(" - geen watergang aanwezig in peilgebied, peilgebied wordt in zijn geheel meegenomen")
                polygon_list.append(huidig_peilgebied_lyr)
                pass
        #----------------------------------------------------------------------------------------
        # Merge all polygons together
        merge_str = ";".join(polygon_list)
        
        fieldmappings = gp.createobject("FieldMappings")
        fldmap_OVK_ID = gp.createobject("FieldMap")

        for fc in polygon_list:
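            # AddInputField raises when an input does not carry the ovk field (e.g.
            # level areas without any channel that were appended as-is above); the
            # bare except below simply skips those inputs.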
            try:
                fldmap_OVK_ID.AddInputField(fc, ovk_field)
            except:
                pass

        fieldmappings.AddFieldMap(fldmap_OVK_ID)

        if use_intersect_channel == 'true':
            gp.Merge_management(merge_str, output_afvoervlakken_shp, fieldmappings)
        else:
            temp_merge_fc = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Merge_management(merge_str, temp_merge_fc, fieldmappings)
            gp.dissolve_management(temp_merge_fc, output_afvoervlakken_shp, ovk_field)


        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        shutil.rmtree(os.path.join(workspace, "voronoi_work"))
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
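ArcGIS script tools pass boolean parameters to sys.argv as the strings 'true' and 'false',
which is why the example above compares use_intersect_channel against the literal 'true'.
A minimal sketch of a helper that normalises such a parameter (parse_gp_boolean is an
illustrative name, not part of the tool):

# minimal sketch, not part of the original tool
def parse_gp_boolean(value):
    """Return True for 'true' (any case); False for 'false', '#' or empty."""
    return str(value).strip().lower() == 'true'

# usage sketch:
# use_intersect_channel = parse_gp_boolean(sys.argv[4])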
Beispiel #9
0
def main(options, args):
    # Create the Geoprocessor object
    gp = arcgisscripting.create()
    gp.RefreshCatalog
    gp.OverwriteOutput = 1

    debuglogging()
    #----------------------------------------------------------------------------------------
    #create header for logfile
    log.info("")
    log.info("*********************************************************")
    log.info("Calculate bottomwidth for channels (manning)")
    log.info("This python script is developed by "
             + "Nelen & Schuurmans B.V. and is a part of 'Turtle'")
    log.info(version)
    log.debug('loading module (%s)' % __revision__)
    log.info("*********************************************************")
    log.info("arguments: %s" %(sys.argv))
    log.info("")

    #----------------------------------------------------------------------------------------
    # Check the settings for this script
    check_ini = turtlebase.general.missing_keys(options.ini, ["<fill in the keys from the ini file here>"])
    if len(check_ini) > 0:
        log.error("missing keys in turtle-settings.ini file (header %s)" % NAME_SCRIPT)
        log.error(check_ini)
        sys.exit(1)

    #----------------------------------------------------------------------------------------
    # Create workspace
    workspace = options.turtle_ini['location_temp']

    turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

    if not os.path.isdir(workspace):
        os.makedirs(workspace)
    workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
    if errorcode == 1:
        log.error("failed to create a file geodatabase in %s" % workspace)

    #----------------------------------------------------------------------------------------
    # Input parameters
    """
    nodig voor deze tool:
    """
    for argv in sys.argv[1:]:
        turtlebase.filenames.check_filename(argv)

    if len(sys.argv) == 7:
        input_channels = sys.argv[1]
        gauckler_manning_coefficient = sys.argv[2]  #1 / 22.5  # n
        conversion_constant = sys.argv[3]  #1  # k
        max_slope = sys.argv[4]  #0.000021  # S
        depth = sys.argv[5]  #0.8  # d
        talud = sys.argv[6]  #2  # T
    else:
        log.error("usage: <argument1> <argument2>")
        sys.exit(1)

    #----------------------------------------------------------------------------------------
    # Check geometry input parameters
    log.info("Check geometry of input parameters")
    geometry_check_list = []

    log.debug(" - check <input >: %s" % argument1)

    if not turtlebase.arcgis.is_file_of_type(gp, input_channels, 'Polyline'):
        log.error("%s is not a %s feature class!" % (input_channels, 'Polyline'))
        geometry_check_list.append("%s -> (%s)" % (input_channels, 'Polyline'))

    if len(geometry_check_list) > 0:
        log.error("check input: %s" % geometry_check_list)
        sys.exit(2)
    #----------------------------------------------------------------------------------------
    # Check required fields in input data
    log.info("Check required fields in input data")

    missing_fields = []

    #<check required fields from input data, append them to list if missing>
    check_fields = {}  # e.g. {input_channels: ['OVKIDENT']}
    for input_fc, fieldnames in check_fields.items():
        for fieldname in fieldnames:
            if not turtlebase.arcgis.is_fieldname(gp, input_fc, fieldname):
                errormsg = "fieldname %s not available in %s" % (fieldname, input_fc)
                log.error(errormsg)
                missing_fields.append(errormsg)

    if len(missing_fields) > 0:
        log.error("missing fields in input data: %s" % missing_fields)
        sys.exit(2)
    #----------------------------------------------------------------------------------------
    # Environments

    #----------------------------------------------------------------------------------------
    # read the input feature class
    channel_values = nens.gp.get_table(gp, input_channels, primary_key='ovkident')

    # write the results to the output table (output_table, output_ident and
    # result_dict are placeholders for the omitted bottom-width calculation)
    turtlebase.arcgis.write_result_to_output(output_table, output_ident, result_dict)

    #----------------------------------------------------------------------------------------
    # Delete temporary workspace geodatabase & ascii files
    try:
        log.debug("delete temporary workspace: %s" % workspace_gdb)
        gp.delete(workspace_gdb)

        log.info("workspace deleted")
    except:
        log.warning("failed to delete %s" % workspace_gdb)

    log.info("*********************************************************")
    log.info("Finished")
    log.info("*********************************************************")

    del gp
    pass
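The template above reads the Gauckler-Manning ingredients (n, k, S, d and T) but leaves the
actual bottom-width calculation out. As a hedged illustration of what such a step could look
like, the sketch below computes the discharge of a trapezoidal profile with Manning's formula
and finds the bottom width for a given design discharge by bisection; the function names, the
bisection approach and the example values are assumptions, not the tool's implementation.

# illustrative sketch only, not the tool's implementation
import math

def manning_discharge(bottom_width, depth, talud, n, k, slope):
    """Discharge Q of a trapezoidal profile: Q = (k / n) * A * R**(2/3) * sqrt(S)."""
    area = depth * (bottom_width + talud * depth)                         # wet cross-section A
    perimeter = bottom_width + 2.0 * depth * math.sqrt(1.0 + talud ** 2)  # wet perimeter P
    radius = area / perimeter                                             # hydraulic radius R
    return (k / n) * area * radius ** (2.0 / 3.0) * math.sqrt(slope)

def bottom_width_for(q_design, depth, talud, n, k, slope, b_max=100.0):
    """Bottom width that carries q_design, found by bisection (Q grows with the width)."""
    low, high = 0.0, b_max
    for _ in range(60):
        mid = (low + high) / 2.0
        if manning_discharge(mid, depth, talud, n, k, slope) < q_design:
            low = mid
        else:
            high = mid
    return (low + high) / 2.0

# usage sketch with the example values from the inline comments above
# (the design discharge itself is not part of the template):
# bottom_width_for(q_design, depth=0.8, talud=2.0, n=1 / 22.5, k=1.0, slope=0.000021)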
Beispiel #10
0
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters... ")
        #use_onderbemalingen = False
        if len(sys.argv) == 6:
            input_peilgebiedgegevens = sys.argv[1]
            input_toetspunten = sys.argv[2]
            input_resultaten = sys.argv[3]
            output_table = sys.argv[4]
            output_csv = sys.argv[5]
            use_csv = not(output_csv == '#')
        else:
            log.error("Usage: python rural_naverwerking.py <peilvakgegevens table> <toetspunten_table> <resultaten_csv> <output_table> <output_csv>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        #check input parameters
        log.info('Checking presence of input files... ')
        if not(use_csv):
            log.warning("no output has been defined, output will be written to temp workspace")
        if not(gp.exists(input_toetspunten)):
            log.error("input_toetspunten "+input_toetspunten+" does not exist!")
            sys.exit(5)
        if not(gp.exists(input_resultaten)):
            log.error("input_resultaten "+input_resultaten+" does not exist!")
            sys.exit(5)

        log.info('input parameters checked... ')

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL','location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        log.info("A-1) Reading peilgebiedgegevens... ")
        gpgident = config.get('GENERAL', 'gpgident')
        peilgebied_dict = nens.gp.get_table(gp, input_peilgebiedgegevens, primary_key=gpgident.lower())

        log.info("A-2) Converting toetspunten to csv")
        toetspunten_csv = os.path.join(workspace, "nav_toets.csv")
        nav_toetspunten = nens.gp.join_on_primary_key(gp, peilgebied_dict, input_toetspunten, gpgident.lower())

        turtlebase.arcgis.convert_dict_to_csv(nav_toetspunten, toetspunten_csv)

        log.info("A-3) Preparing hymstat csv")
        hymstat_csv = os.path.join(workspace, "nav_hym.csv")
        #turtlebase.arcgis.convert_table_to_csv(gp, input_resultaten, hymstat_csv)
        shutil.copyfile(input_resultaten, hymstat_csv)

        #prepare naverwerking ini file
        log.info("B-1) Reading naverwerkingstool.ini... ")
        location_script = os.path.dirname(sys.argv[0])
        nav_config = mainutils.read_config(__file__, config.get('GENERAL', 'filename_naverwerking_ini'))
        configfilename = os.path.join(location_script, config.get('GENERAL', 'filename_naverwerking_ini'))

        nav_config.set('GENERAL', 'CSVTOETSPUNTEN', toetspunten_csv) #input_toetspunten
        nav_config.set('GENERAL', 'CSVINPUT1', hymstat_csv)

        # image output of the naverwerkingstool goes to the same output dir as the csv,
        # so if csv output is selected we MUST use that output csv as the intermediate csv too
        if use_csv:
            log.info(" - using csv")
            if not output_csv.endswith('.csv'):
                output_csv += '.csv'
            nav_config.set('GENERAL', 'CSVOUTPUT1', output_csv)
        else:
            log.info(" - not using csv")
            output_csv = os.path.join(workspace, "nav_output.csv")
            nav_config.set('GENERAL', 'CSVOUTPUT1', output_csv)

        nav_config.set('GENERAL', 'CSVINPUT2', '')
        nav_config.set('GENERAL', 'CSVOUTPUT2', '')
        configfile = open(configfilename, "wb")
        nav_config.write(configfile)
        configfile.close()
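        # the rewritten ini sits next to the script so that the external naverwerking
        # executable started below can pick up the new csv paths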

        #----------------------------------------------------------------------------------------
        #call naverwerkingstool
        arguments = ""

        #change working path to exe directory
        os.chdir(location_script)

        # execute the external naverwerking program
        log.info("Naverwerking calculation")

        import subprocess
        naverwerking_exe = config.get('GENERAL', 'filename_naverwerking_exe')
        child = subprocess.Popen(os.path.join(location_script, naverwerking_exe) + arguments)
        child.wait()
        log.info("naverwerking.exe succesfully executed")

        """
        HIERONDER ALLES HERSCHRIJVEN
        """
        #----------------------------------------------------------------------------------------
        #post: write to database, table and/or csv
        log.info("C-1) Reading output csv")
        data_set = csv.DictReader(open(output_csv))

        # NAME defaults to the key if nothing is given; the key is the column name from the csv
        # fields that do not occur here keep their name and are of type "long", precision 10, scale 5
        naverwerkingFields = {\
            gpgident: {"NAME": gpgident, "TYPE": "TEXT", "PRECISION": "10", "SCALE": "5", "LENGTH": "50"},\
            "X0": {"NAME": "X0", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "B": {"NAME": "B", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_2": {"NAME": "WS_2", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_5": {"NAME": "WS_5", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_10": {"NAME": "WS_10", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_15": {"NAME": "WS_15", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_20": {"NAME": "WS_20", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_25": {"NAME": "WS_25", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_50": {"NAME": "WS_50", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "WS_100": {"NAME": "WS_100", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_S": {"NAME": "STA_TP_I_ST", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_H": {"NAME": "STA_TP_I_HL", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_A": {"NAME": "STA_TP_I_AK", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_I_G": {"NAME": "STA_TP_I_GR", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_S": {"NAME": "STA_TP_O_ST", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_H": {"NAME": "STA_TP_O_HL", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_A": {"NAME": "STA_TP_O_AK", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "STA_TP_O_G": {"NAME": "STA_TP_O_GR", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "T_I": {"NAME": "T_I", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "T_O": {"NAME": "T_O", "TYPE": "DOUBLE", "PRECISION": "10", "SCALE": "5"},\
            "RSLT_Bron": {"NAME": "RSLT_Bron", "TYPE": "TEXT", "LENGTH": "50", "PRECISION": "10", "SCALE": "5"},\
            "RSLT_Datum": {"NAME": "RSLT_Datum", "TYPE": "DATE", "PRECISION": "10", "SCALE": "5"},\
            }

        #convert columnnames in data_set
        data_set_converted = {}
        source_str = "hymstat: %s" % os.path.basename(input_resultaten)
        if len(source_str) > 50:
            source_str = source_str[:50]
        import time
        date_str = time.strftime('%x')

        for row in data_set:
            peilgebied_id = row['PEILVAKID']
            data_set_converted[peilgebied_id] = {gpgident: peilgebied_id}
            for key in row.keys():
                if key in naverwerkingFields:
                    data_set_converted[peilgebied_id][naverwerkingFields[key]["NAME"]] = row[key]

            data_set_converted[peilgebied_id]["RSLT_Bron"] = source_str
            data_set_converted[peilgebied_id]["RSLT_Datum"] = date_str

        #----------------------------------------------------------------------------------------
        #check if output_table exists. if not, create with correct rows
        log.info("C-2) Checking output table... ")
        if not(gp.exists(output_table)):
            gp.CreateTable(os.path.dirname(output_table), os.path.basename(output_table))

        #----------------------------------------------------------------------------------------
        #for key,row in naverwerkingFields.items():
        #	print row["NAME"]+" "+row["TYPE"]+" "+row["PRECISION"]+" "+row["SCALE"]
        #check if output_table has the correct rows
        log.info("C-3) Checking fields")
        for field_name, field_settings in naverwerkingFields.items():
            if not turtlebase.arcgis.is_fieldname(gp, output_table, field_settings['NAME']):
                if field_settings['TYPE'] == 'DOUBLE':
                    gp.AddField(output_table, field_settings['NAME'], field_settings['TYPE'], field_settings['PRECISION'], field_settings['SCALE'])
                elif field_settings['TYPE'] == 'TEXT':
                    gp.AddField(output_table, field_settings['NAME'], field_settings['TYPE'], '#', '#', field_settings['LENGTH'])
                else:
                    gp.AddField(output_table, field_settings['NAME'], field_settings['TYPE'], field_settings['PRECISION'], field_settings['SCALE'])

        # ---------------------------------------------------------------------------
        #add data to file_output
        turtlebase.arcgis.write_result_to_output(output_table, gpgident, data_set_converted)

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        if os.path.isfile(toetspunten_csv):
            os.remove(toetspunten_csv)
        if os.path.isfile(hymstat_csv):
            os.remove(hymstat_csv)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
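The loop above renames the naverwerking csv columns onto the geodatabase field names declared
in naverwerkingFields. A stripped-down sketch of that renaming on plain dictionaries, handy for
testing outside ArcGIS (field_map, convert_row and the sample values are illustrative, and the
map shown is only a subset of the real field definitions):

# illustrative sketch only: rename csv columns via a field map
field_map = {
    "X0": "X0",
    "STA_TP_I_S": "STA_TP_I_ST",   # csv header -> output field name
    "STA_TP_O_S": "STA_TP_O_ST",
}

def convert_row(row, ident_field, ident_value):
    converted = {ident_field: ident_value}
    for key, value in row.items():
        if key in field_map:
            converted[field_map[key]] = value
    return converted

print(convert_row({"PEILVAKID": "GPG_1", "X0": "1.2", "STA_TP_I_S": "0.4"},
                  "GPGIDENT", "GPG_1"))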
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
                log.error("failed to create a file geodatabase in %s" % workspace)
        # Input parameters
        if len(sys.argv) == 11:
            # input parameters
            input_voronoi_polygon = sys.argv[1]
            input_rrcf_waterlevel = sys.argv[2]
            input_ahn_raster = sys.argv[3]
            input_lgn_raster = sys.argv[4]
            input_lgn_conversion = sys.argv[5]

            # output parameters
            output_result_table = sys.argv[6]

            # optional output
            output_inundation = sys.argv[7]
            if output_inundation == "#":
                output_inundation = os.path.join(workspace_gdb, "inun_nbw")

            if len(os.path.basename(output_inundation)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_inundation))
                sys.exit(1)

            output_waterdamage = sys.argv[8]
            if output_waterdamage == "#":
                output_waterdamage = os.path.join(workspace_gdb, "damage_nbw")

            if len(os.path.basename(output_waterdamage)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_waterdamage))
                sys.exit(1)

            output_inundation_total = sys.argv[9]
            if len(os.path.basename(output_inundation_total)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_inundation_total))
                sys.exit(1)

            output_waterdamage_total = sys.argv[10]
            if len(os.path.basename(output_waterdamage_total)) > 13:
                log.error("filename raster output (%s) cannot contain more than 13 characters" % os.path.basename(output_waterdamage_total))
                sys.exit(1)

        else:
            log.error("usage: <input_voronoi_polygon> <input_rrcf_waterlevel> <input_ahn_raster> \
            <input_lgn_raster> <input_lgn_conversion> <output_result_table> \
            <output_inundation> <output_waterdamage> <output inundation total> <output waterdamage total>")
            sys.exit(1)
        #----------------------------------------------------------------------------------------
        temp_voronoi = os.path.join(workspace_gdb, "voronoi")
        gp.select_analysis(input_voronoi_polygon, temp_voronoi)

        # Check geometry input parameters
        cellsize = gp.describe(input_ahn_raster).MeanCellHeight

        log.info("Check geometry of input parameters")
        geometry_check_list = []

        if input_lgn_conversion != "#":
            if not gp.exists(input_lgn_conversion):
                errormsg = "%s does not exist" % input_lgn_conversion
                log.error(errormsg)
                geometry_check_list.append(errormsg)

        log.debug(" - check voronoi polygon: %s" % temp_voronoi)
        if gp.describe(temp_voronoi).ShapeType != 'Polygon':
            log.error("Input voronoi is not a polygon feature class!")
            geometry_check_list.append(temp_voronoi + " -> (Polygon)")

        log.debug(" - check ahn raster %s" % input_ahn_raster)
        if gp.describe(input_ahn_raster).DataType != 'RasterDataset':
            log.error("Input AHN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_ahn_raster).PixelType[0] not in ['U', 'S']:
            log.error("Input AHN is a floating point raster, for this script an integer is nessecary")
            geometry_check_list.append(input_ahn_raster + " -> (Integer)")

        log.debug(" - check lgn raster %s" % input_lgn_raster)
        if gp.describe(input_lgn_raster).DataType != 'RasterDataset':
            log.error("Input LGN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_lgn_raster).PixelType[0] not in ['U', 'S']:
            log.error("Input LGN is a floating point raster, for this script an integer is nessecary")
            geometry_check_list.append(input_lgn_raster + " -> (Integer)")

        if gp.describe(input_lgn_raster).MeanCellHeight != float(cellsize):
            log.error("Cell size of LGN is %s, must be %s" % (
                                    gp.describe(input_lgn_raster).MeanCellHeight, cellsize))
            geometry_check_list.append(input_lgn_raster + " -> (Cellsize %s)" % cellsize)

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)
        #----------------------------------------------------------------------------------------
        # Check required fields in database
        log.info("Check required fields in input data")
        # create return period list
        return_periods = config.get('naverwerking_rrcf', 'herhalingstijden').split(", ")
        log.debug(" - return periods: %s" % return_periods)

        missing_fields = []

        for return_period in return_periods:
            if not turtlebase.arcgis.is_fieldname(gp, input_rrcf_waterlevel, "WS_%s" % return_period):
                log.debug(" - missing: %s in %s" % ("WS_%s" % return_period, input_rrcf_waterlevel))
                missing_fields.append("%s: %s" % (input_rrcf_waterlevel, "WS_%s" % return_period))

        # <check required fields from input data, append them to list if missing>
        field_streefpeil = config.get('naverwerking_rrcf', 'field_streefpeil')
        check_fields = {input_rrcf_waterlevel: [config.get('naverwerking_rrcf', 'calculation_point_ident'), field_streefpeil]}
        if input_lgn_conversion != "#":
            check_fields[input_lgn_conversion] = [config.get('naverwerking_rrcf', 'lgn_conv_ident'),
                                                    config.get('naverwerking_rrcf', 'input_field_k5')]
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #---------------------------------------------------------------------
        # Environments
        log.info("Set environments")
        gp.extent = gp.describe(temp_voronoi).extent  # use extent of the voronoi polygons

        #---------------------------------------------------------------------
        # read waterlevel table as a dictionary
        log.info("Read waterlevel table")
        waterlevel_dict = nens.gp.get_table(gp, input_rrcf_waterlevel, primary_key=config.get('naverwerking_rrcf', 'calculation_point_ident').lower())
        log.debug(waterlevel_dict)

        # Add fields to output
        for return_period in return_periods:
            if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, "WS_%s" % return_period):
                log.info(" - add field WS_%s" % return_period)
                gp.addfield(temp_voronoi, "WS_%s" % return_period, "double")

        if not turtlebase.arcgis.is_fieldname(gp, temp_voronoi, field_streefpeil):
            log.info(" - add field %s" % field_streefpeil)
            gp.addfield(temp_voronoi, field_streefpeil, "double")

        # copy waterlevel to voronoi polygons
        rows = gp.UpdateCursor(temp_voronoi)
        for row in nens.gp.gp_iterator(rows):
            row_id = row.GetValue(config.get('naverwerking_rrcf', 'calculation_point_ident'))
            if row_id in waterlevel_dict:
                log.debug(waterlevel_dict[row_id])
                for return_period in return_periods:
                    row.SetValue("WS_%s" % return_period, waterlevel_dict[row_id]['ws_%s' % return_period])
                    row.SetValue(field_streefpeil,
                                 waterlevel_dict[row_id][field_streefpeil.lower()])
                rows.UpdateRow(row)

        #---------------------------------------------------------------------
        # Create waterlevel rasters
        log.info("Create rasters for waterlevels")
        for return_period in return_periods:
            log.info(" - create raster for ws_%s" % return_period)
            out_raster_dataset = workspace_gdb + "/ws_%s" % return_period
            gp.FeatureToRaster_conversion(temp_voronoi, "WS_%s" % return_period, out_raster_dataset, cellsize)

        #---------------------------------------------------------------------
        # Create target level raster
        log.info("Create targetlevel raster")
        out_raster_targetlevel = os.path.join(workspace_gdb, "targetlv")
        gp.FeatureToRaster_conversion(temp_voronoi, field_streefpeil, out_raster_targetlevel, cellsize)

        #---------------------------------------------------------------------
        # Create freeboard raster
        log.info("Create freeboard raster")

        # create ahn ascii
        ahn_ascii = os.path.join(workspace, "ahn.asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn_raster, ahn_ascii)

        targetlevel_ascii = os.path.join(workspace, "targetlvl.asc")
        log.debug("targetlevel ascii: %s" % targetlevel_ascii)
        gp.RasterToASCII_conversion(out_raster_targetlevel, targetlevel_ascii)

        freeboard_ascii = os.path.join(workspace, "freeboard.asc")
        turtlebase.spatial.create_freeboard_raster(ahn_ascii, targetlevel_ascii, freeboard_ascii)

        #----------------------------------------------------------------------------------------
        # Create K5 LGN
        log.info("Reclass LGN to K5 raster")
        lgn_ascii = os.path.join(workspace, "lgn.asc")
        lgn_k5_ascii = os.path.join(workspace, "lgn_k5.asc")

        gp.RasterToASCII_conversion(input_lgn_raster, lgn_ascii)

        if input_lgn_conversion != '#':
            reclass_dict = nens.gp.get_table(gp, input_lgn_conversion,
                                             primary_key=config.get('naverwerking_rrcf', 'lgn_conv_ident').lower())
            turtlebase.spatial.reclass_lgn_k5(lgn_ascii, lgn_k5_ascii, reclass_dict)
        else:
            turtlebase.spatial.reclass_lgn_k5(lgn_ascii, lgn_k5_ascii)
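        # the K5 grid groups the LGN land-use codes into the classes used below:
        # 1 = urban, 2 = high-value agriculture, 3 = arable land, 4 = grassland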

        #----------------------------------------------------------------------------------------
        # Create inundation raster
        # if ws_ > ahn, then inundation
        inundation_raster_list = []
        inundation_total_raster_list = []

        log.info("Create inundation rasters")
        # inundation: urban area
        return_period_urban = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_stedelijk')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_stedelijk') != "-":
            log.info(" - create inundation urban")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_urban)
            if gp.exists(waterlevel):
                inundation_urban = os.path.join(workspace, "inun_urban.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel, 1,
                                                            return_period_urban, inundation_urban, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_urban)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_urban = os.path.join(workspace, "inun_total_urban.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_urban, inundation_total_urban, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_urban)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # inundatie hoogwaardige landbouw
        return_period_agriculture = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_hoogwaardig')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_hoogwaardig') != "-":
            log.info(" - create inundation agriculture")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_agriculture)
            if gp.exists(waterlevel):
                # Inundation with lgn
                inundation_agriculture = os.path.join(workspace, "inun_agri.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                           2, return_period_agriculture, inundation_agriculture, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_agriculture)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_agriculture = os.path.join(workspace, "inun_total_agri.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               2, return_period_agriculture, inundation_total_agriculture, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_agriculture)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # inundatie akkerbouw
        return_period_rural = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_akker')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_akker') != "-":
            log.info(" - create inundation rural")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_rural)
            if gp.exists(waterlevel):
                inundation_rural = os.path.join(workspace, "inun_rural.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                           3, return_period_rural, inundation_rural, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_rural)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_rural = os.path.join(workspace, "inun_total_rural.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               3, return_period_rural, inundation_total_rural, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_rural)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # inundatie grasland
        return_period_grass = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_grasland')
        if config.get('naverwerking_rrcf', 'percentage_inundatie_grasland') != "-":
            log.info(" - create inundation grass")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_grass)
            if gp.exists(waterlevel):
                log.debug("waterlevel grasland = %s" % waterlevel)
                inundation_grass = os.path.join(workspace, "inun_grass.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                           4, return_period_grass, inundation_grass, workspace, use_lgn=True)
                inundation_raster_list.append(inundation_grass)
                if output_inundation_total != '#':
                    # Inundation without lgn
                    inundation_total_grass = os.path.join(workspace, "inun_total_grass.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               4, return_period_grass, inundation_total_grass, workspace, use_lgn=False)
                    inundation_total_raster_list.append(inundation_total_grass)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        if len(inundation_raster_list) > 1:
            log.info("Merge inundation rasters")
            output_inundation_exists = turtlebase.spatial.merge_ascii(inundation_raster_list, output_inundation, workspace)
        else:
            log.error("fewer than two inundation rasters available, nothing to merge")
            # make sure the flag exists for the checks further down
            output_inundation_exists = -1

        if len(inundation_total_raster_list) > 1:
            log.info("Merge inundation total rasters")
            turtlebase.spatial.merge_ascii(inundation_total_raster_list, output_inundation_total, workspace)
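        # Illustrative only (the helper below is never called by the tool): the four
        # blocks above share one pattern, driven by config keys of the form
        # 'herhalingstijd_inundatie_<landuse>' / 'percentage_inundatie_<landuse>';
        # the waterdamage section further down repeats the pattern, but tests the
        # water levels against the freeboard grid instead of the AHN.
        def _inundation_config_keys(land_use):
            """Hypothetical helper: config keys for a land-use class such as 'stedelijk'."""
            return ('herhalingstijd_inundatie_%s' % land_use,
                    'percentage_inundatie_%s' % land_use)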

        #----------------------------------------------------------------------------------------
        # Create waterdamage raster
        # if ws_ > freeboard, then damage
        damage_raster_list = []
        damage_total_raster_list = []

        log.info("Create waterdamage rasters")
        # damage: urban area
        return_period_urban_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_stedelijk')
        if config.get('naverwerking_rrcf', 'percentage_overlast_stedelijk') != "-":
            log.info(" - create waterdamage urban")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_urban_damage)
            if gp.exists(waterlevel):
                damage_urban = os.path.join(workspace, "damage_urban.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           1, return_period_urban_damage, damage_urban, workspace, use_lgn=True)
                damage_raster_list.append(damage_urban)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_urban = os.path.join(workspace, "damage_total_urban.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_urban_damage, damage_total_urban, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_urban)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # overlast hoogwaardig
        return_period_agriculture_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_hoogwaardig')
        if config.get('naverwerking_rrcf', 'percentage_overlast_hoogwaardig') != "-":
            log.info(" - create waterdamage agriculture")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_agriculture_damage)
            if gp.exists(waterlevel):
                damage_agriculture = workspace + "/damage_agri_%s.asc" % return_period_agriculture_damage
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           2, return_period_agriculture_damage, damage_agriculture, workspace, use_lgn=True)
                damage_raster_list.append(damage_agriculture)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_agriculture = os.path.join(workspace, "damage_total_agri.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_agriculture_damage, damage_total_agriculture, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_agriculture)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # overlast akker
        return_period_rural_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_akker')
        if config.get('naverwerking_rrcf', 'percentage_overlast_akker') != "-":
            log.info(" - create waterdamage rural")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_rural_damage)
            if gp.exists(waterlevel):
                damage_rural = workspace + "/damage_rural_%s.asc" % return_period_rural_damage
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           3, return_period_rural_damage, damage_rural, workspace, use_lgn=True)
                damage_raster_list.append(damage_rural)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_rural = os.path.join(workspace_gdb, "damage_total_rural.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_rural_damage, damage_total_rural, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_rural)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # overlast grasland
        return_period_grass_damage = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_grasland')
        if config.get('naverwerking_rrcf', 'percentage_overlast_grasland') != "-":
            log.info(" - create waterdamage grass")
            waterlevel = "%s/ws_%s" % (workspace_gdb, return_period_grass_damage)
            if gp.exists(waterlevel):
                damage_grass = os.path.join(workspace_gdb, "damage_grass.asc")
                turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, freeboard_ascii, waterlevel,
                                                           4, return_period_grass_damage, damage_grass, workspace, use_lgn=True)
                damage_raster_list.append(damage_grass)
                if output_waterdamage_total != '#':
                    # Waterdamage without lgn
                    damage_total_grass = os.path.join(workspace_gdb, "damage_total_grass.asc")
                    turtlebase.spatial.create_inundation_raster(lgn_k5_ascii, ahn_ascii, waterlevel,
                                                               1, return_period_grass_damage, damage_total_grass, workspace, use_lgn=False)
                    damage_total_raster_list.append(damage_total_grass)
            else:
                log.error("%s does not exists! check ini-file and tempfolder" % waterlevel)

        # Merge waterdamage rasters
        if len(damage_raster_list) > 1:
            log.info("Merge waterdamage rasters")
            output_waterdamage_exists = turtlebase.spatial.merge_ascii(damage_raster_list, output_waterdamage, workspace)
        else:
            log.error("fewer than two waterdamage rasters available, nothing to merge")
            # make sure the flag exists for the checks further down
            output_waterdamage_exists = -1

        if len(damage_total_raster_list) > 1:
            log.info("Merge waterdamage total rasters")
            turtlebase.spatial.merge_ascii(damage_total_raster_list, output_waterdamage_total, workspace)
        #----------------------------------------------------------------------------------------
        # calculate percentage inundation
        """
        input:
        - inundation / damage (raster dataset)
        - input_voronoi_polygon (with GPGIDENT) (feature class)
        - lgn_k5 (raster dataset)
        """
        gpgident_field = config.get('GENERAL', 'gpgident')
        # dissolve voronoi based on gpgident

        log.debug("dissolve voronoi polygons, based on gpgident")
        temp_fc_gpgident = os.path.join(workspace_gdb, "temp_fc_gpgident")
        gp.Dissolve_management(temp_voronoi, temp_fc_gpgident, gpgident_field)

        # Calculate area total, gpgident
        if not turtlebase.arcgis.is_fieldname(gp, temp_fc_gpgident, "area_total"):
            gp.addfield(temp_fc_gpgident, "area_total", "Double")
        turtlebase.arcgis.calculate_area(gp, temp_fc_gpgident, "area_total")

        gpgident_dict = nens.gp.get_table(gp, temp_fc_gpgident, primary_key=gpgident_field.lower())
        log.debug("gpgident_dict: %s" % gpgident_dict)

        # create feature class from lgn k5 ascii
        output_reclass_lgn = os.path.join(workspace_gdb, "reclass_lgn")
        gp.ASCIIToRaster_conversion(lgn_k5_ascii, output_reclass_lgn)
        temp_fc_lgn = os.path.join(workspace_gdb, "fc_lgn")
        gp.RasterToPolygon_conversion(output_reclass_lgn, temp_fc_lgn, "NO_SIMPLIFY")

        # union lgn with gpg-areas
        temp_fc_union_lgn = os.path.join(workspace_gdb, "fc_union_lgn")
        gp.Union_analysis(temp_fc_gpgident + ";" + temp_fc_lgn, temp_fc_union_lgn)
        dissolve_lyr = turtlebase.arcgis.get_random_layer_name()
        gp.MakeFeatureLayer_management(temp_fc_union_lgn, dissolve_lyr, "%s <> ''" % gpgident_field)
        temp_fc_dissolve_lgn = os.path.join(workspace_gdb, "dissolve_lgn")
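        # RasterToPolygon names its value field GRIDCODE or grid_code depending on
        # the output workspace type, hence the double check below.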
        if turtlebase.arcgis.is_fieldname(gp, dissolve_lyr, "GRIDCODE"):
            gp.Dissolve_management(dissolve_lyr, temp_fc_dissolve_lgn, "%s; GRIDCODE" % gpgident_field)
            gridcode = "gridcode"
        elif turtlebase.arcgis.is_fieldname(gp, dissolve_lyr, "grid_code"):
            gp.Dissolve_management(dissolve_lyr, temp_fc_dissolve_lgn, "%s; grid_code" % gpgident_field)
            gridcode = "grid_code"
        else:
            log.error("no field GRIDCODE or grid_code available in %s" % dissolve_lyr)
            sys.exit(2)

        # Calculate area lgn
        if not turtlebase.arcgis.is_fieldname(gp, temp_fc_dissolve_lgn, "area_lgn"):
            gp.addfield(temp_fc_dissolve_lgn, "area_lgn", "Double")
        turtlebase.arcgis.calculate_area(gp, temp_fc_dissolve_lgn, "area_lgn")

        lgn_dict = nens.gp.get_table(gp, temp_fc_dissolve_lgn)
        translate_lgn_dict = translate_dict(lgn_dict, gridcode, 'area_lgn')
        log.debug("translate_lgn_dict: %s" % translate_lgn_dict)

        # Create feature class from inundation_grid
        """ values: 10, 25, 50, 100"""
        if output_inundation_exists == 0:
            temp_fc_inundation = os.path.join(workspace_gdb, "inundation")
            log.info(output_inundation)
            gp.RasterToPolygon_conversion(output_inundation, temp_fc_inundation, "NO_SIMPLIFY")
            temp_fc_union_inundation = os.path.join(workspace_gdb, "union_inun")
            gp.Union_analysis(temp_fc_dissolve_lgn + ";" + temp_fc_inundation, temp_fc_union_inundation)
            dissolve_inundation_lyr = turtlebase.arcgis.get_random_layer_name()
            if turtlebase.arcgis.is_fieldname(gp, temp_fc_union_inundation, "GRIDCODE_1"):
                gp.MakeFeatureLayer_management(temp_fc_union_inundation, dissolve_inundation_lyr, "GRIDCODE_1 > 0")
                gridcode_1 = "gridcode_1"
            elif turtlebase.arcgis.is_fieldname(gp, temp_fc_union_inundation, "GRID_CODE1"):
                gp.MakeFeatureLayer_management(temp_fc_union_inundation, dissolve_inundation_lyr, "GRID_CODE1 > 0")
                gridcode_1 = "grid_code1"
            elif turtlebase.arcgis.is_fieldname(gp, temp_fc_union_inundation, "GRID_CODE_1"):
                gp.MakeFeatureLayer_management(temp_fc_union_inundation, dissolve_inundation_lyr, "GRID_CODE_1 > 0")
                gridcode_1 = "grid_code_1"
            else:
                log.error("No field available named gridcode_1 or grid_code1")
                log.warning(nens.gp.get_table_def(gp, temp_fc_union_inundation))
                sys.exit(1)
            temp_fc_dissolve_inundation = os.path.join(workspace_gdb, "dissolve_inun")
            dissolve_string = "%s;%s;%s" % (gpgident_field.upper(), gridcode, gridcode_1)
            log.debug(" - dissolve layer: %s" % dissolve_inundation_lyr)
            gp.Dissolve_management(dissolve_inundation_lyr, temp_fc_dissolve_inundation, dissolve_string)

            # Calculate area inundation
            if not turtlebase.arcgis.is_fieldname(gp, temp_fc_dissolve_inundation, "area_inun"):
                gp.addfield(temp_fc_dissolve_inundation, "area_inun", "Double")
            turtlebase.arcgis.calculate_area(gp, temp_fc_dissolve_inundation, "area_inun")

            inundation_dict = nens.gp.get_table(gp, temp_fc_dissolve_inundation)
            translate_inundation_dict = translate_dict(inundation_dict, gridcode_1, 'area_inun')
            log.debug("translate_inundation_dict: %s" % translate_inundation_dict)
        else:
            translate_inundation_dict = {}

        # Create feature class from waterdamage grid
        """ values: 10, 15, 25"""
        if output_waterdamage_exists == 0:
            try:
                temp_fc_waterdamage = os.path.join(workspace_gdb, "damage")
                gp.RasterToPolygon_conversion(output_waterdamage, temp_fc_waterdamage, "NO_SIMPLIFY")
                waterdamage = True
            except:
                log.warning("waterdamage raster is empty")
                waterdamage = False

            if waterdamage:
                temp_fc_union_waterdamage = os.path.join(workspace_gdb, "damage_union")
                gp.Union_analysis(temp_fc_dissolve_lgn + ";" + temp_fc_waterdamage, temp_fc_union_waterdamage)

                dissolve_waterdamage_lyr = turtlebase.arcgis.get_random_layer_name()
                gp.MakeFeatureLayer_management(temp_fc_union_waterdamage, dissolve_waterdamage_lyr, "%s > 0" % gridcode_1)

                temp_fc_dissolve_waterdamage = os.path.join(workspace_gdb, "dissolve_damage")
                gp.Dissolve_management(dissolve_waterdamage_lyr, temp_fc_dissolve_waterdamage, "%s; %s; %s" % (gpgident_field, gridcode, gridcode_1))

                # Calculate area waterdamage
                if not turtlebase.arcgis.is_fieldname(gp, temp_fc_dissolve_waterdamage, "area_damag"):
                    gp.addfield(temp_fc_dissolve_waterdamage, "area_damag", "Double")
                turtlebase.arcgis.calculate_area(gp, temp_fc_dissolve_waterdamage, "area_damag")

                waterdamage_dict = nens.gp.get_table(gp, temp_fc_dissolve_waterdamage)
                translate_waterdamage_dict = translate_dict(waterdamage_dict, gridcode_1, 'area_damag')
                log.debug("translate_waterdamage_dict: %s" % translate_waterdamage_dict)
            else:
                translate_waterdamage_dict = {}
        else:
            translate_waterdamage_dict = {}

        no_data_value = float(config.get('naverwerking_rrcf', 'no_data_value'))
        result_dict = {}
        log.info("Calculating results")
        for gpgident, fields in gpgident_dict.items():
            # area_total
            #area_total = fields['area_total']

            #set defaults
            percentage_inundation_urban = no_data_value
            percentage_inundation_agriculture = no_data_value
            percentage_inundation_rural = no_data_value
            percentage_inundation_grass = no_data_value
            toetsing_inundation_urban = 9
            toetsing_inundation_agriculture = 9
            toetsing_inundation_rural = 9
            toetsing_inundation_grass = 9

            percentage_waterdamage_urban = no_data_value
            percentage_waterdamage_agriculture = no_data_value
            percentage_waterdamage_rural = no_data_value
            percentage_waterdamage_grass = no_data_value
            toetsing_waterdamage_urban = 9
            toetsing_waterdamage_agriculture = 9
            toetsing_waterdamage_rural = 9
            toetsing_waterdamage_grass = 9

            if gpgident in translate_inundation_dict:
                log.debug("Calculate percentage inundation for %s" % gpgident)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_stedelijk')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_stedelijk')
                toetsing_inundation_urban, percentage_inundation_urban = calculate_toetsing(translate_inundation_dict,
                                                                                            gpgident, 1, translate_lgn_dict,
                                                                                            hhtijd, toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_hoogwaardig')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_hoogwaardig')
                toetsing_inundation_agriculture, percentage_inundation_agriculture = calculate_toetsing(translate_inundation_dict,
                                                                                                        gpgident, 2, translate_lgn_dict,
                                                                                                        hhtijd, toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_akker')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_akker')
                toetsing_inundation_rural, percentage_inundation_rural = calculate_toetsing(translate_inundation_dict, gpgident,
                                                               3, translate_lgn_dict, hhtijd,
                                                               toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_inundatie_grasland')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_inundatie_grasland')
                toetsing_inundation_grass, percentage_inundation_grass = calculate_toetsing(translate_inundation_dict, gpgident,
                                                               4, translate_lgn_dict, hhtijd,
                                                               toetsing_perc, no_data_value)

            if gpgident in translate_waterdamage_dict:
                log.debug("Calculate percentage waterdamage for %s" % gpgident)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_stedelijk')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_stedelijk')
                toetsing_waterdamage_urban, percentage_waterdamage_urban = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                              1, translate_lgn_dict, hhtijd,
                                                                                              toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_hoogwaardig')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_hoogwaardig')
                toetsing_waterdamage_agriculture, percentage_waterdamage_agriculture = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                                          2, translate_lgn_dict, hhtijd,
                                                                                                          toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_akker')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_akker')
                toetsing_waterdamage_rural, percentage_waterdamage_rural = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                              3, translate_lgn_dict, hhtijd,
                                                                                              toetsing_perc, no_data_value)

                hhtijd = config.get('naverwerking_rrcf', 'herhalingstijd_overlast_grasland')
                toetsing_perc = config.get('naverwerking_rrcf', 'percentage_overlast_grasland')
                toetsing_waterdamage_grass, percentage_waterdamage_grass = calculate_toetsing(translate_waterdamage_dict, gpgident,
                                                                                              4, translate_lgn_dict, hhtijd,
                                                                                              toetsing_perc, no_data_value)

            result_dict[gpgident] = {
                    gpgident_field: gpgident,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_stedelijk'):
                                     percentage_inundation_urban,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_hoogwaardig'):
                                     percentage_inundation_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_akker'):
                                     percentage_inundation_rural,
                    config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_grasland'):
                                      percentage_inundation_grass,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_stedelijk'):
                                      percentage_waterdamage_urban,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_hoogwaardig'):
                                      percentage_waterdamage_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_akker'):
                                      percentage_waterdamage_rural,
                    config.get('naverwerking_rrcf',
                            'field_percentage_overlast_grasland'):
                                     percentage_waterdamage_grass,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_stedelijk'):
                                     toetsing_inundation_urban,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_hoogwaardig'):
                                      toetsing_inundation_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_akker'):
                                     toetsing_inundation_rural,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_grasland'):
                                     toetsing_inundation_grass,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_stedelijk'):
                                     toetsing_waterdamage_urban,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_hoogwaardig'):
                                     toetsing_waterdamage_agriculture,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_akker'):
                                     toetsing_waterdamage_rural,
                    config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_grasland'):
                                     toetsing_waterdamage_grass,
                                     }
        #---------------------------------------------------------------------
        # Create output table
        if not gp.exists(output_result_table):
            log.info("Create new output table")
            temp_result_table = os.path.join(workspace_gdb, "result_table")
            gp.CreateTable_management(os.path.dirname(temp_result_table), os.path.basename(temp_result_table))
            copy_table = True
        else:
            temp_result_table = output_result_table
            copy_table = False

        fields_to_add = [config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_akker'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_inundatie_grasland'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_akker'),
                         config.get('naverwerking_rrcf',
                            'field_percentage_overlast_grasland'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_akker'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_inundatie_grasland'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_stedelijk'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_hoogwaardig'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_akker'),
                         config.get('naverwerking_rrcf',
                            'field_toetsing_overlast_grasland')]

        if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, gpgident_field):
            log.debug(" - add field %s to %s" % (gpgident_field, temp_result_table))
            gp.addfield_management(temp_result_table, gpgident_field, 'text')

        for field in fields_to_add:
            if not turtlebase.arcgis.is_fieldname(gp, temp_result_table, field):
                log.debug(" - add field %s to %s" % (field, temp_result_table))
                gp.addfield_management(temp_result_table, field, 'double')

        #----------------------------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(temp_result_table, gpgident_field.lower(), result_dict)

        if copy_table:
            gp.TableToTable_conversion(temp_result_table, os.path.dirname(output_result_table), os.path.basename(output_result_table))

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        # remove leftover ascii files from the workspace
        for temp_filename in os.listdir(workspace):
            if temp_filename.endswith('.asc'):
                try:
                    os.remove(os.path.join(workspace, temp_filename))
                except Exception, e:
                    log.debug(e)
                    
        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                        gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # Input parameters
        """
        nodig voor deze tool:
        """
        tempfiles = []
        if len(sys.argv) == 6:
            input_hydrovak = sys.argv[1]
            optional_area = sys.argv[2][:10]
            output_shapefile = sys.argv[3]
            optional_bottleneck_points = sys.argv[4]
            optional_terminal_points = sys.argv[5]            
        else:
            log.warning("usage: <input_hydrovak> <output_shapefile> <optional_bottleneck_points> <optional_terminal_points> <optional_area>")
            sys.exit(1)

        tolerance_points = float(config.get('netwerkanalyse', 'tolerance_points'))
        input_shapefile = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
        tempfiles.append(input_shapefile)
        gp.select_analysis(input_hydrovak, input_shapefile)

        #---------------------------------------------------------------------
        # Check required fields in input data
        ovk_field = config.get('general', 'ovkident')

        if not turtlebase.arcgis.is_fieldname(gp, input_shapefile, ovk_field):
            errormsg = "fieldname %s not available in %s" % (
                                    ovk_field, input_shapefile)
            log.error(errormsg)
        #---------------------------------------------------------------------
        # add from and to coordinates
        update_to_and_from_coordinates(gp, input_shapefile, ovk_field)

        network_data = read_table(gp, config, input_shapefile, optional_area)
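        # build a graph from the network records and run the streaming analysis;
        # the results are written to the output shapefile further below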
        
        g = turtlebase.network.import_dbf_into_graph(config, network_data,
                                                     tolerance_points, optional_area)
        turtlebase.network.let_it_stream(g)

        #create output:
        fields_to_add = [('incoming', 'SHORT'),
                         ('examined', 'SHORT'),
                         ('terminal', 'SHORT'),
                         ('som_oppvl', 'DOUBLE'),
                         ('bottleneck', 'SHORT'),
                         ('flip', 'SHORT')]
        gp.select_analysis(input_shapefile, output_shapefile)

        #fields_to_add = {'incoming':"SHORT",'examined':"SHORT",'terminal':"SHORT", 'cum_urban':"DOUBLE", 'cum_rural':"DOUBLE", 'bottleneck':"SHORT", 'flip':"SHORT"} #'ovkident':"TEXT",
        for field_name, field_type in fields_to_add:
            if turtlebase.arcgis.is_fieldname(gp, output_shapefile, field_name):
                gp.DeleteField_management(output_shapefile, field_name)
            gp.AddField_management(output_shapefile, field_name, field_type)
            log.info("Adding field %s" % field_name)

        turtlebase.network.save_result_shapefile(gp, config, g, output_shapefile)

        log.info("Recognizing bottlenecks")
        log.debug("create field to store bottlenecks")

        row = gp.UpdateCursor(output_shapefile)
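        # mark a segment as a bottleneck (value = number of incoming segments) when
        # it is an unexamined terminal or when it has more than one incoming segment
        # and has not been examined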
        for item in nens.gp.gp_iterator(row):
            examined = item.getValue(config.get('netwerkanalyse', 'examined'))
            incoming = item.getValue(config.get('netwerkanalyse', 'incoming'))
            terminal = item.getValue(config.get('netwerkanalyse', 'terminal'))

            if terminal == 1 and examined == 0:
                item.SetValue(config.get('netwerkanalyse', 'bottleneck'), incoming)

            if incoming > 1 and examined == 0:
                item.SetValue(config.get('netwerkanalyse', 'bottleneck'), incoming)
            row.UpdateRow(item)

        # if the user has asked for the bottleneck points as a point file,
        # the start x and y coordinates are first stored in a dictionary,
        # from which a point file is then created
        if optional_bottleneck_points != '#':
            temp_shape = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
            tempfiles.append(temp_shape)
            log.info("Creating bottleneck points file")
            create_point_file_from_polyline(gp, config, output_shapefile, temp_shape, 'bottlenecks')
            gp.Select_analysis(temp_shape, optional_bottleneck_points)
        # if the user has asked for the terminal points as a point file,
        # the start x and y coordinates are first stored in a dictionary,
        # from which a point file is then created
        if optional_terminal_points != "#":
            temp_shape2 = turtlebase.arcgis.get_random_file_name(workspace, ".shp")
            tempfiles.append(temp_shape2)
            log.info("Creating terminal points file")
            create_point_file_from_polyline(gp, config, output_shapefile, temp_shape2, 'terminals')
            gp.Select_analysis(temp_shape2, optional_terminal_points)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)
            turtlebase.arcgis.remove_tempfiles(gp, log, tempfiles)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters")
        if len(sys.argv) == 5:
            input_peilgebieden_feature = sys.argv[1]
            input_kunstwerken_feature = sys.argv[2]
            input_afvoer_table = sys.argv[3]
            output_feature = sys.argv[4]
        else:
            log.error("Usage: python rural_afvoerrelaties.py \
            <peilgebieden feature> <kunstwerken feature> \
            <afvoerrelaties table> <output feature>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        #check input parameters
        gpgident = config.get('GENERAL', 'gpgident').lower()
        kwkident = config.get('GENERAL', 'kwkident').lower()

        log.info('Checking presence of input files')
        if not(gp.exists(input_peilgebieden_feature)):
            log.error("inputfile peilgebieden %s does not exist!" % input_peilgebieden_feature)
            sys.exit(5)

        if not(gp.exists(input_afvoer_table)):
            log.error("inputfile afvoerrelaties %s does not exist!" % input_afvoer_table)
            sys.exit(5)

        log.info('Input parameters checked')
        #----------------------------------------------------------------------------------------
        log.info("Prepare input_peilgebieden_feature")
        temp_peilgebieden_feature = turtlebase.arcgis.get_random_file_name(workspace_gdb)
        gp.Select_analysis(input_peilgebieden_feature, temp_peilgebieden_feature)

        add_centroids(gp, temp_peilgebieden_feature)
        peilgebieden_dict = nens.gp.get_table(gp, temp_peilgebieden_feature, primary_key=gpgident)

        if input_kunstwerken_feature != '#':
            log.info("Prepare input_kunstwerken_feature")
            temp_kunstwerken_feature = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Select_analysis(input_kunstwerken_feature, temp_kunstwerken_feature)

            gp.addxy(temp_kunstwerken_feature)
            kunstwerken_dict = nens.gp.get_table(gp, temp_kunstwerken_feature, primary_key=kwkident)
        else:
            kunstwerken_dict = {}

        log.info("Reading input_afvoer_table")
        relaties_dict = nens.gp.get_table(gp, input_afvoer_table, primary_key=kwkident)

        log.info("Calculating afvoerrelaties")
        afvoer_van = config.get('afvoerrelaties', 'input_peilg_from').lower()
        afvoer_naar = config.get('afvoerrelaties', 'input_peilg_to').lower()

        output_relations = {}
        data_source = "pg: %s, kw: %s, rel: %s" % (os.path.basename(input_peilgebieden_feature),
                                                   os.path.basename(input_kunstwerken_feature),
                                                   os.path.basename(input_afvoer_table))
        data_source = data_source[:50]
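        # draw one line per discharge relation: from the centroid of the source
        # level area, via the structure location when known, to the centroid of
        # the target area (or a point offset from the start if the target is unknown)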

        for relation, attributes in relaties_dict.items():
            id_from = attributes[afvoer_van]
            id_to = attributes[afvoer_naar]
            item_id = "%s_%s" % (id_from, id_to)
            coords = []
            # get start coords
            x1 = peilgebieden_dict[id_from]['point_x']
            y1 = peilgebieden_dict[id_from]['point_y']
            coords.append((x1, y1))

            if relation in kunstwerken_dict:
                x2 = kunstwerken_dict[relation]['point_x']
                y2 = kunstwerken_dict[relation]['point_y']
                coords.append((x2, y2))

            if id_to in peilgebieden_dict:
                x3 = peilgebieden_dict[id_to]['point_x']
                y3 = peilgebieden_dict[id_to]['point_y']
            else:
                x3 = x1 + 10
                y3 = y1 + 10
            coords.append((x3, y3))

            output_relations[item_id] = {"Relation_id": item_id, "From": id_from, "To": id_to,
                                         "Structure": relation, "Source": data_source, "coords": coords}

        #put new data in output_table
        insert_count = draw_lines_from_dict(gp, output_relations, output_feature)
        log.info(" - %s records has been inserted" % insert_count)

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #---------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(
                                            gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #---------------------------------------------------------------------
        # check inputfields
        log.info("Getting commandline parameters")
        if len(sys.argv) == 8:
            input_level_area_fc = sys.argv[1]
            input_level_area_table = sys.argv[2]
            input_ahn_raster = sys.argv[3]
            input_lgn_raster = sys.argv[4]
            input_lgn_conversion = sys.argv[5]
            input_onderbemalingen = sys.argv[6]
            if input_onderbemalingen == "#":
                use_onderbemalingen = False
            else:
                use_onderbemalingen = True
            output_file = sys.argv[7]
        else:
            log.error("Usage: python toetspuntenbepaling.py <ahn-file> \
                        <lgn-file> <onderbemalingen-optional> \
                        <peilgebieden-feature> <peilvakgegevens-table> \
                        <conversietabel> <outputfile-HydroBase>")
            sys.exit(1)
        #---------------------------------------------------------------------
        # check input parameters
        log.info('Checking presence of input files')
        if not(gp.exists(input_level_area_fc)):
            log.error("inputfile peilgebieden %s does not exist!",
                      input_level_area_fc)
            sys.exit(5)
        if not(gp.exists(input_level_area_table)):
            log.error("inputfile peilvakgegevens %s does not exist!",
                      input_level_area_table)
            sys.exit(5)
        if (use_onderbemalingen and not(gp.exists(input_onderbemalingen))):
            log.error("inputfile onderbemalingen %s does not exist!",
                      input_onderbemalingen)
            sys.exit(5)

        log.info('input parameters checked')

        #---------------------------------------------------------------------
        # Check geometry input parameters
        cellsize = gp.describe(input_ahn_raster).MeanCellHeight

        log.info("Check geometry of input parameters")
        geometry_check_list = []

        log.debug(" - check level area: %s" % input_level_area_fc)
        if gp.describe(input_level_area_fc).ShapeType != 'Polygon':
            errormsg = ("%s is not a polygon feature class!",
                        input_level_area_fc)
            log.error(errormsg)
            geometry_check_list.append(errormsg)

        if turtlebase.arcgis.fc_is_not_empty(gp, input_level_area_fc):
            errormsg = "input '%s' is empty" % input_level_area_fc
            log.error(errormsg)
            sys.exit(1)

        if turtlebase.arcgis.fc_is_not_empty(gp, input_level_area_table):
            errormsg = "input '%s' is empty" % input_level_area_table
            log.error(errormsg)
            sys.exit(1)

        if use_onderbemalingen:
            if turtlebase.arcgis.fc_is_not_empty(gp, input_onderbemalingen):
                errormsg = "input '%s' is empty" % input_onderbemalingen
                log.error(errormsg)
                sys.exit(1)

        log.debug(" - check ahn raster %s" % input_ahn_raster)
        if gp.describe(input_ahn_raster).DataType != 'RasterDataset':
            log.error("Input AHN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_ahn_raster).PixelType[0] not in ['U', 'S']:
            errormsg = ("Input AHN is a floating point raster, \
                        for this script an integer is nessecary")
            log.error(errormsg)
            geometry_check_list.append(errormsg)

        log.debug(" - check lgn raster %s" % input_lgn_raster)
        if gp.describe(input_lgn_raster).DataType != 'RasterDataset':
            log.error("Input LGN is not a raster dataset")
            sys.exit(1)

        if gp.describe(input_lgn_raster).PixelType[0] not in ['U', 'S']:
            errormsg = ("Input LGN is a floating point raster, \
                        for this script an integer is nessecary")
            log.error(errormsg)
            geometry_check_list.append(errormsg)

        if int(gp.describe(input_lgn_raster).MeanCellHeight) != int(cellsize):
            errormsg = ("Cell size of LGN is %s, must be %s" % (
                            gp.describe(input_lgn_raster).MeanCellHeight,
                            int(cellsize)))
            log.error(errormsg)
            geometry_check_list.append(errormsg)

        if len(geometry_check_list) > 0:
            log.error("check input: %s" % geometry_check_list)
            sys.exit(2)

        #---------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")
        gpgident = config.get('GENERAL', 'gpgident').lower()
        streefpeil = config.get('toetspunten', 'field_streefpeil').lower()

        missing_fields = []

        # check required fields from input data, append them to list if missing
        if not turtlebase.arcgis.is_fieldname(
                    gp, input_level_area_fc, gpgident):
            log.debug(" - missing: %s in %s" % (
                        gpgident, input_level_area_fc))
            missing_fields.append("%s: %s" % (
                        input_level_area_fc, gpgident))

        if not turtlebase.arcgis.is_fieldname(
                    gp, input_level_area_table, gpgident):
            log.debug(" - missing: %s in %s" % (
                        gpgident, input_level_area_table))
            missing_fields.append("%s: %s" % (
                        input_level_area_table, gpgident))

        if not turtlebase.arcgis.is_fieldname(
                    gp, input_level_area_table, streefpeil):
            log.debug(" - missing: %s in %s" % (
                        streefpeil, input_level_area_table))
            missing_fields.append("%s: %s" % (
                        input_level_area_table, streefpeil))

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s", missing_fields)
            sys.exit(2)

        #---------------------------------------------------------------------
        # Environments
        log.info("Set environments")
        temp_level_area = os.path.join(workspace_gdb, "peilgebieden")
        if input_level_area_fc.endswith(".shp"):
            log.info("Copy features of level areas to workspace")
            gp.select_analysis(input_level_area_fc, temp_level_area)
        else:
            log.info("Copy level areas to workspace")
            gp.copy_management(input_level_area_fc, temp_level_area)
        # use extent from level area
        gp.extent = gp.describe(temp_level_area).extent

        #---------------------------------------------------------------------
        # Create K5 LGN
        log.info("Translate LGN to NBW-classes")
        lgn_ascii = turtlebase.arcgis.get_random_file_name(
                                            workspace, ".asc")
        lgn_k5_ascii = turtlebase.arcgis.get_random_file_name(
                                            workspace, ".asc")

        gp.RasterToASCII_conversion(input_lgn_raster, lgn_ascii)
        lgn_ident = config.get('toetspunten', 'lgn_conv_ident')

        if input_lgn_conversion != '#':
            reclass_dict = nens.gp.get_table(gp, input_lgn_conversion,
                                             primary_key=lgn_ident)
            turtlebase.spatial.reclass_lgn_k5(
                            lgn_ascii, lgn_k5_ascii, reclass_dict)
        else:
            turtlebase.spatial.reclass_lgn_k5(lgn_ascii, lgn_k5_ascii)
        #---------------------------------------------------------------------
        # create ahn ascii
        log.info("Create ascii from ahn")

        ahn_ascii = turtlebase.arcgis.get_random_file_name(workspace, ".asc")
        log.debug("ahn ascii: %s" % ahn_ascii)
        gp.RasterToASCII_conversion(input_ahn_raster, ahn_ascii)

        #---------------------------------------------------------------------
        # Change ahn and lgn if use_onderbemalingen == True
        if use_onderbemalingen:
            log.info("Cut out level deviations")
            gridcode_fieldname = "GRIDCODE"
            if not turtlebase.arcgis.is_fieldname(
                    gp, input_onderbemalingen, gridcode_fieldname):
                log.debug(" - add field %s" % gridcode_fieldname)
                gp.addfield_management(
                    input_onderbemalingen, gridcode_fieldname, "Short")

            row = gp.UpdateCursor(input_onderbemalingen)
            for item in nens.gp.gp_iterator(row):
                item.SetValue(gridcode_fieldname, 1)
                row.UpdateRow(item)

            onderbemaling_raster = turtlebase.arcgis.get_random_file_name(
                                                            workspace_gdb)
            gp.FeatureToRaster_conversion(
                            input_onderbemalingen, gridcode_fieldname,
                            onderbemaling_raster, cellsize)

            onderbemaling_asc = turtlebase.arcgis.get_random_file_name(
                                                    workspace, ".asc")
            gp.RasterToASCII_conversion(onderbemaling_raster,
                                        onderbemaling_asc)

            ahn_ascii = turtlebase.spatial.cut_out_onderbemaling(
                            ahn_ascii, onderbemaling_asc, workspace)
            lgn_k5_ascii = turtlebase.spatial.cut_out_onderbemaling(
                            lgn_k5_ascii, onderbemaling_asc, workspace)

        #---------------------------------------------------------------------
        # Add ID Int to level area
        log.info("Create level area ascii")
        id_int = 'id_int'
        area_id_dict = add_integer_ident(gp, temp_level_area,
                                         id_int, gpgident)

        out_raster_dataset = turtlebase.arcgis.get_random_file_name(
                                                        workspace_gdb)
        gp.FeatureToRaster_conversion(temp_level_area, id_int,
                                      out_raster_dataset, cellsize)

        id_int_ascii = turtlebase.arcgis.get_random_file_name(
                                            workspace, ".asc")
        log.debug("id_int_ascii: %s" % id_int_ascii)
        gp.RasterToASCII_conversion(out_raster_dataset, id_int_ascii)

        #---------------------------------------------------------------------
        log.info("Read targetlevel table")
        area_level_dict = nens.gp.get_table(gp, input_level_area_table,
                                            primary_key=gpgident)
        target_level_dict = {}

        for k, v in area_level_dict.items():
            if k in area_id_dict:
                int_id = area_id_dict[k][id_int]
                target_level_dict[int_id] = {'targetlevel': v[streefpeil],
                                             'gpgident': k}
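        # output fields for the toetspunten table; the names presumably encode
        # inundatie (I) / overlast (O) per land-use class (ST, HL, AK, GR)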

        toetspunten_fields = ["DFLT_I_ST", "DFLT_I_HL", "DFLT_I_AK",
                              "DFLT_I_GR", "DFLT_O_ST", "DFLT_O_HL",
                              "DFLT_O_AK", "DFLT_O_GR", "MTGMV_I_ST",
                              "MTGMV_I_HL", "MTGMV_I_AK", "MTGMV_I_GR",
                              "MTGMV_O_ST", "MTGMV_O_HL", "MTGMV_O_AK",
                              "MTGMV_O_GR"]
        #---------------------------------------------------------------------
        log.info("calculate toetspunten")
        toetspunten_dict = turtlebase.spatial.calculcate_toetspunten(
                            ahn_ascii, lgn_k5_ascii, id_int_ascii,
                            toetspunten_fields, target_level_dict,
                            onderbemaling="#")
        #---------------------------------------------------------------------
        log.info("Create output table")
        create_output_table(gp, output_file, gpgident, toetspunten_fields)
        #---------------------------------------------------------------------
        # Add metadata
        import time
        date_time_str = time.strftime("%d %B %Y %H:%M:%S")
        source = input_ahn_raster

        for area_id, values in toetspunten_dict.items():
            toetspunten_dict[area_id]['date_time'] = date_time_str
            toetspunten_dict[area_id]['source'] = source

        #---------------------------------------------------------------------
        # Write results to output table
        log.info("Write results to output table")
        turtlebase.arcgis.write_result_to_output(
                    output_file, gpgident, toetspunten_dict)

        #---------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        # remove leftover ascii files from the workspace
        for temp_filename in os.listdir(workspace):
            if temp_filename.endswith('.asc'):
                try:
                    os.remove(os.path.join(workspace, temp_filename))
                except Exception, e:
                    log.debug(e)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()

        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        #check inputfields
        log.info("Getting commandline parameters")
        if len(sys.argv) == 3:
            input_oppervlak = sys.argv[1]
            input_afvoer = sys.argv[2]

            log.info("input oppervlak: %s" % input_oppervlak)
            log.info("input afvoer: %s" % input_afvoer)
        else:
            log.error("Usage: python rural_controle_afvoer.py <rr_oppervlakte> <rr_afvoer>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        error_count = 0

        log.info("A-1) Read RR_Oppervlak")
        gpgident = config.get('GENERAL', 'gpgident').lower()
        if not turtlebase.arcgis.is_fieldname(gp, input_oppervlak, gpgident):
            log.error("field %s not found, we cannot continue" % gpgident)
            sys.exit(1)
        oppervlak_data = nens.gp.get_table(gp, input_oppervlak, primary_key=gpgident)

        log.info("A-2) Read RR_Afvoer")
        kwkident = config.get('GENERAL', 'kwkident').lower()
        if not turtlebase.arcgis.is_fieldname(gp, input_afvoer, kwkident):
            log.error("field %s not found, we cannot continue" % kwkident)
            sys.exit(1)
        afvoer_data = nens.gp.get_table(gp, input_afvoer, primary_key=kwkident)

        log.info("B-1) Checking links from KW's")
        afvoer_van = config.get('controle_afvoerrelaties', 'afvoer_van').lower()
        afvoer_naar = config.get('controle_afvoerrelaties', 'afvoer_naar').lower()
        boundary_str = config.get('controle_afvoerrelaties', 'boundary_str')
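        # every structure must reference existing level areas on both its
        # 'from' and 'to' side, unless it discharges to the boundary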
        for kwk_id, value in afvoer_data.items():
            if (afvoer_data[kwk_id][afvoer_van] != boundary_str) and not(oppervlak_data.has_key(value[afvoer_van])):
                log.error("[%s] = %s, field %s: [%s] = '%s' not found in RR_Oppervlak." % (
                          kwkident, kwk_id, afvoer_van, gpgident, value[afvoer_van]))
                error_count += 1
            if (afvoer_data[kwk_id][afvoer_naar] != boundary_str) and not(oppervlak_data.has_key(value[afvoer_naar])):
                log.error("[%s] = %s, field %s: [%s] = '%s' not found in RR_Oppervlak." % (
                          kwkident, kwk_id, afvoer_naar, gpgident, value[afvoer_naar]))
                error_count += 1

        log.info("B-2) Checking links from GPG's")
        for gpg_ident in oppervlak_data.keys():
            #try to find gpg_ident in afvoer_data
            for value in afvoer_data.values():
                if value[afvoer_naar] == gpg_ident:
                    break
                if value[afvoer_van] == gpg_ident:
                    break
            else:
                log.error("%s: %s not found in RR_Afvoer." % (gpgident, gpg_ident))
                error_count += 1

        if error_count == 0:
            log.info("No errors were found.")
        else:
            log.warning("%s error(s) were found." % error_count)
        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.warning("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()
    except:
        log.error(traceback.format_exc())
        sys.exit(1)

    finally:
        logging_config.cleanup()
        del gp
def main():
    try:
        gp = mainutils.create_geoprocessor()
        config = mainutils.read_config(__file__, 'turtle-settings.ini')
        logfile = mainutils.log_filename(config)
        logging_config = LoggingConfig(gp, logfile=logfile)
        mainutils.log_header(__name__)

        #----------------------------------------------------------------------------------------
        # Create workspace
        workspace = config.get('GENERAL', 'location_temp')
        if workspace == "-":
            workspace = tempfile.gettempdir()
                        
        turtlebase.arcgis.delete_old_workspace_gdb(gp, workspace)

        if not os.path.isdir(workspace):
            os.makedirs(workspace)
        workspace_gdb, errorcode = turtlebase.arcgis.create_temp_geodatabase(gp, workspace)
        if errorcode == 1:
            log.error("failed to create a file geodatabase in %s" % workspace)

        #----------------------------------------------------------------------------------------
        if len(sys.argv) == 4:
            log.info("Reading and checking input")
            waterlijnen = sys.argv[1]
            peilgebieden = sys.argv[2]
            output_file = sys.argv[3]
        else:
            log.error("usage: <waterlijnen> <peilgebieden> <output_file>")
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Check required fields in input data
        log.info("Check required fields in input data")
        missing_fields = []

        #<check required fields from input data, append them to list if missing>"
        gpgident = config.get('GENERAL', 'gpgident')
        check_fields = {peilgebieden: [gpgident]}
        for input_fc, fieldnames in check_fields.items():
            for fieldname in fieldnames:
                if not turtlebase.arcgis.is_fieldname(gp, input_fc, fieldname):
                    errormsg = "fieldname %s not available in %s" % (fieldname, input_fc)
                    log.error(errormsg)
                    missing_fields.append(errormsg)

        if len(missing_fields) > 0:
            log.error("missing fields in input data: %s" % missing_fields)
            sys.exit(2)
        #----------------------------------------------------------------------------------------

        try:
            # determine the centroid of each level area and store it in a dictionary
            # the dict becomes {<gpgident>: [centroid: <centroid>]}
            peilgebieden_centroides_dict = bepalen_centroides(gp, peilgebieden, gpgident)
            # first intersect the level areas with the water lines
            # extract the shapefiles from the geodatabase
            log.info("Copy " + waterlijnen + " to the workspace")
            waterlijnen_lokaal = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            log.debug("Copy the water lines to a local directory")
            gp.select_analysis(waterlijnen, waterlijnen_lokaal)

            intersect = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.Intersect_analysis(waterlijnen_lokaal + ";" + peilgebieden, intersect)

            log.info("Reading line features")
            # now read the nodes of the water lines
            waterlijnen_nodes_dict = reading_line_features_nodes(gp, waterlijnen_lokaal)

            nodes = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            create_point_file_from_dict(gp, waterlijnen_nodes_dict, nodes, "nodes_id")
            # now link the nodes to the level areas using a spatial join
            spat_jn_nodes = turtlebase.arcgis.get_random_file_name(workspace_gdb)
            gp.SpatialJoin_analysis(nodes, peilgebieden, spat_jn_nodes)

            # read the nodes of the water lines including gpgident
            waterlijnen_nodes_dict = reading_line_feature_nodes_to_dict_according_to_peilgebied(gp, spat_jn_nodes, gpgident)

            # read the vertices of the water lines
            waterlijnen_vertex_dict = reading_line_features_vertices(gp, intersect, gpgident, peilgebieden_centroides_dict)

            # determine the point on a water line closest to the centroid of each level area:
            # look at the nodes first, then at the vertices; if no water line is present,
            # create the centroid point of the level area itself
            dictionary_with_closest_point_to_centroid_on_waterlijn = calculate_minimal_distance_between_points(gp, peilgebieden_centroides_dict, waterlijnen_nodes_dict, waterlijnen_vertex_dict)

            output_centroid_file = turtlebase.arcgis.get_random_file_name(workspace_gdb)

            create_point_file_from_dict(gp, dictionary_with_closest_point_to_centroid_on_waterlijn, output_centroid_file, gpgident)
            gp.select_analysis(output_centroid_file, output_file)

        except Exception, e:
            errormsg = traceback.extract_tb(sys.exc_traceback)
            log.error(errormsg)
            log.error(e)
            sys.exit(1)

        #----------------------------------------------------------------------------------------
        # Delete temporary workspace geodatabase & ascii files
        try:
            log.debug("delete temporary workspace: %s" % workspace_gdb)
            gp.delete(workspace_gdb)

            log.info("workspace deleted")
        except:
            log.debug("failed to delete %s" % workspace_gdb)

        mainutils.log_footer()