Example #1
def cargaRadar(inputfile, delim, id_radar, connstr, tableoutput):
    # Open a connection for inserting values
    ogr.UseExceptions()
    try:
        conn = ogr.Open(connstr)
        print("Connection successful...")
    except Exception:
        print('[ ERROR ]: Connection error')
        return
        #sys.exit( 1 )

    # Open the text file
    try:
        file = open(inputfile)
        print('File read successfully...')
    except Exception:
        print('[ ERROR ]: Error reading the file')
        return
        #sys.exit(1)
    # Skip the first two lines (presumably headers)
    line = file.readline()
    line = file.readline()

    # Insert records into the table
    aux = 1
    fecha_anterior = '1900-01-01'
    # TODO: remove the record limit before packaging
    limite = 100
    while line != "" and aux <= limite:
        values = line.split(delim)
        fecha = values[0]
        x = values[1]
        y = values[2]
        deformacion = values[3]
        if fecha != fecha_anterior:
            print("Loading date '%s'..." % str(fecha))
            fecha_anterior = fecha
        sql = "INSERT INTO %s VALUES ('%s', '%s', '%s', '%s', '%s');" % (
            tableoutput, fecha, id_radar, x, y, deformacion)
        try:
            conn.ExecuteSQL(sql)
        except Exception as e:
            print('[ ERROR ]: Error loading the data\nMessage: %s' % e)
            return
            #sys.exit(1)
        # TODO: remove this debug print
        if aux in (1, 10, 100, 1000, 10000):
            print('inserting record ' + str(aux) + '...')
        aux = aux + 1
        line = file.readline()
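A minimal call sketch (file name, delimiter, radar ID, connection string, and table name are all hypothetical; the "PG: ..." connection-string format is the OGR PostgreSQL driver's, as also used in Example #12):

    cargaRadar('deformation.txt', ';', 'RADAR_01',
               'PG: host=localhost dbname=radar user=gis password=gis',
               'radar_data')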
Example #2
    def __return_a_list_of_fc(self, fgdb_full_path):

        # The file geodatabase will be read and each feature class will be added to the feature_class_list.
        feature_class_list = []

        # Append each feature class in the Esri File Geodatabase to the feature_class_list.
        # REF: https://pcjericks.github.io/py-gdalogr-cookbook/vector_layers.html
        # "Get all layers in an Esri File GeoDataBase"
        ogr.UseExceptions()
        driver = ogr.GetDriverByName("OpenFileGDB")
        gdb = driver.Open(fgdb_full_path)
        for feature_class_idx in range(gdb.GetLayerCount()):
            feature_class = gdb.GetLayerByIndex(feature_class_idx)
            feature_class_list.append(feature_class.GetName())

        return feature_class_list
Example #3
 def __init__(self, shapefile, fieldnames=None, fields=None, srs_wkt=None, update=False):
     ''' Open the shapefile for writing or appending.
         @type shapefile:  C{str}
         @param shapefile: Path to the shapefile
         @type fieldnames: C{list}
         @param fieldnames: Field name order list
         @type fields:     C{dict}
         @param fields:    L{Fields dict<formats.fields>}
         @type srs_wkt:    C{str}
         @param srs_wkt:   Spatial reference system WKT
         @type update:     C{boolean}
         @param update:    Update or overwrite existing shapefile
         @note: Field names can only be <= 10 characters long. Longer names will be silently truncated, which may result in non-unique column names that will definitely cause problems later.
                Field names can not contain spaces or special characters, except underscores.
                Starting with version 1.7, the OGR Shapefile driver tries to generate unique field names: successive duplicate field names, including those created by truncation to 10 characters, are truncated to 8 characters and appended with a serial number from 1 to 99.
         @see: U{http://www.gdal.org/ogr/drv_shapefile.html}
     '''
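     # Illustration of the truncation note above (names hypothetical):
     # 'population_2010' and 'population_2020' both truncate to 'population',
     # so only the first survives the dedup check below.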
     gdal.ErrorReset()
     ogr.UseExceptions()
     self.driver = ogr.GetDriverByName('ESRI Shapefile')
     self.srs = osr.SpatialReference()
     self.filename = shapefile
     self.srs_wkt = srs_wkt
     self.fieldsnames = []  # Truncated field names
     self.shpfieldsnames = []
     self.fields = {}
     self.shpfields = {}
     if fields is None:
         fields = {}
     if fieldnames is None:
         fieldnames = sorted(fields.keys())
     for fieldname in fieldnames:
         if fieldname[0:10] not in self.fieldsnames:
             self.fieldsnames.append(fieldname[0:10])
             self.fields[fieldname[0:10]] = fields[fieldname]
             #print(fieldname[0:10], ':', self.fields[fieldname[0:10]])
     try:
         if update and os.path.exists(shapefile):
             print('EDIT shape mode')
             self.shape = self.__openshapefile__()
         else:
             print('CREATE shape mode')
             self.shape = self.__createshapefile__()
     except Exception as err:
         self.__error__(err)
Example #4
    def open(self):
        try:
            ogr.UseExceptions()
            if self.ds is None:
                self.ds = self.driver.Open(self.file, 0)

        except Exception as ex:
            no_extension = self.conf_param["logsText"][
                'cannot_open_file'].copy()
            initial_err = self.activ_cod + "|" + self.splitroot(
                self.root, self.activ_cod
            ) + "|" + self.layer + "|" + self.logFile.getCatValue(
                self.conf_param['VectorFormats'][
                    self.type]) + "|" + self.logFile.getIssueValue(
                        self.conf_param['VectorFormats'][
                            self.type]) + "|" + ex.args[0] + ' '
            no_extension.insert(0, initial_err)
            self.logFile.writelogs(no_extension, self.conf_param["root_path"])

        finally:
            # Returning from finally also swallows any unexpected exception;
            # the caller only learns whether the dataset ended up open.
            return self.ds is not None
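Since ogr.UseExceptions() makes a failed Open raise instead of silently returning None, a stripped-down sketch of the same guard (function and path names hypothetical) could look like:

    from osgeo import ogr

    def can_open(path):
        ogr.UseExceptions()
        try:
            ds = ogr.Open(path, 0)  # 0 = read-only
        except Exception:
            return False
        return ds is not None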
Example #5
    def new_layer(self, abs_dest, name, t):

        ogr.UseExceptions()

        driver = ogr.GetDriverByName(self.driver_name)

        ds = driver.CreateDataSource(abs_dest)

        if ds is None:
            raise ExtractError(
                "Failed to create data source for driver '{}' at dest '{}'" .format(
                    self.driver_name,
                    abs_dest))

        srs = ogr.osr.SpatialReference()
        srs.ImportFromEPSG(self.epsg)

        # Gotcha! You can't create a layer with a unicode layername!
        # http://gis.stackexchange.com/a/53939/12543
        layer = ds.CreateLayer(name.encode('utf-8'), srs, self.geo_map[t])

        return ds, layer
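A hedged usage sketch: new_layer is a method, so assume an instance of the surrounding class whose driver_name, epsg, and geo_map attributes are set (the 'line' key and its mapping to ogr.wkbLineString are assumptions, not shown in the snippet):

    extractor.driver_name = 'ESRI Shapefile'
    extractor.epsg = 4326
    ds, layer = extractor.new_layer('/tmp/roads_out', u'roads', 'line')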
Example #6
# -*- coding: UTF-8 -*-
"""Geodatabase class representing a file geodatabase object."""

import ogr
ogr.UseExceptions()


########################################################################
class Geodatabase(object):
    """File geodatabase object."""

    # ----------------------------------------------------------------------
    def __init__(self, path):
        """Initialize Geodatabase class with basic properties."""
        self.path = path
        self.ds = None
        return

    # ----------------------------------------------------------------------
    def get_items(self):
        """Get list of tables and feature classes inside a file gdb."""
        ds = ogr.Open(self.path, 0)
        return list({
            ds.GetLayerByIndex(i).GetName()
            for i in range(0, ds.GetLayerCount())
        })

    # ----------------------------------------------------------------------
    def get_schemas(self):
        """Get all tables and feature classes inside a file gdb."""
Example #7
        coord_end = re.search('\'', new_data)
        # compose a string with the found pattern and coordinates in brackets
        geomStr = text.group() + new_data[:coord_end.start()]
    except AttributeError:
        print(searchPattern + ' pattern is not found!!')
        geomStr = 'NONE'
    return geomStr


# In[ ]:


#url path for retrieving data for each feature
#the name of the feature should be appended
url = "https://planetarynames.wr.usgs.gov/Feature/"
ogr.UseExceptions()  # Enable errors
file_csv = 'Mars_short.csv'

if len(sys.argv) > 1:
    file_csv = sys.argv[1]


# In[ ]:


# no headers should be present in a csv file
# the first two values must be featureID and feature_name
# JSON file is written with utf8 due to unicode standard of publications
with open(file_csv, mode='r') as fin, io.open('features.json', 'w', encoding='utf-8') as fout:
    reader = csv.reader(fin, delimiter=',')
    for rows in reader:
Example #8
def simplify_polygon(path_shp, para, path_output):
    gdal.UseExceptions()
    ogr.UseExceptions()

    def addPolygon(simplePolygon, dst_layer, index):
        featureDefn = dst_layer.GetLayerDefn()
        polygon = ogr.CreateGeometryFromWkb(simplePolygon)
        dst_feat = ogr.Feature(featureDefn)
        dst_feat.SetGeometry(polygon)

        geom = dst_feat.GetGeometryRef()
        dst_feat.SetField('id', index)
        dst_feat.SetField('area', geom.Area())

        dst_layer.CreateFeature(dst_feat)
        dst_layer.SyncToDisk()

    def multipoly2poly(src_lyr, para, dst_layer):
        count = 0
        for src_feat in src_lyr:
            if src_feat.GetField(0) > 0:
                count = count + 1
                geom = src_feat.GetGeometryRef()
                if geom.GetGeometryName() == 'MULTIPOLYGON':
                    for geom_part in geom:
                        x = geom_part.SimplifyPreserveTopology(para)
                        addPolygon(x.ExportToWkb(), dst_layer, count)
                else:
                    x = geom.SimplifyPreserveTopology(para)
                    addPolygon(x.ExportToWkb(), dst_layer, count)
            else:
                continue

    # Reading source shp
    drv = ogr.GetDriverByName('ESRI Shapefile')
    src_ds = drv.Open(path_shp, 0)
    src_lyr = src_ds.GetLayer()

    # Reading source coordinate system
    src_srs = osr.SpatialReference()

    # from Layer
    spatialRef = src_lyr.GetSpatialRef()
    # from Geometry
    feature = src_lyr.GetNextFeature()
    geom = feature.GetGeometryRef()
    spatialRef = geom.GetSpatialReference()
    src_srs.ImportFromWkt(spatialRef.ExportToWkt())

    # Creating destination shp
    dst_ds = drv.CreateDataSource(path_output)
    dst_layer = dst_ds.CreateLayer(path_output,
                                   geom_type=ogr.wkbPolygon,
                                   srs=src_srs)

    # Add an ID and area field
    idField = ogr.FieldDefn('id', ogr.OFTInteger)
    areaField = ogr.FieldDefn('area', ogr.OFTReal)
    dst_layer.CreateField(idField)
    dst_layer.CreateField(areaField)

    # Simplification of polygons
    multipoly2poly(src_lyr, para, dst_layer)
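A minimal call sketch (paths and tolerance are hypothetical; para is the SimplifyPreserveTopology tolerance, in the layer's coordinate units):

    simplify_polygon('watersheds.shp', 50.0, 'watersheds_simplified.shp')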
Example #9
def run_check(self,
              condition,
              parameter_name,
              parameter_value,
              fail_response,
              other_values=None):
    """
    The run_check utility function is used to store all of the checks done within the command classes. There are
    many types of checks that are performed on command parameter values before a command can be run. Initially,
    the GeoProcessor was designed so that the checks for each command class (including messages and
    recommendations) lived within each command class. This new design allows all of that rather clunky and repetitive
    message-and-recommendation code to be pulled out of the command classes and placed in this utility function.

    A benefit to this design is that popular checks that are called on many commands (for example: is the CRS_code, the
    coordinate reference code, valid) are only written out once. Then the same check can be called from however many
    command classes necessary. If the message and recommendation strings are changed for a given check, those messages
    only have to be changed once here in this utility command rather than in multiple command classes.

    Each check has a name called the condition. The checks are alphabetized below by their condition statement. In
    the developer documentation there is an explanation for each available check. This way, when additional
    parameters are required (entered via the other_values parameter), the developer knows exactly what the check
    requires. Before utilizing a check in a command class, it is highly recommended that the developer documentation
    for that check is read.

    Each check condition statement is written in a way that answers YES (or TRUE) if the check passes. This makes it
    easy for checks to be written and standardized by multiple developers.

    Args:
        self: the class object of the command being checked
        condition: the condition statement (or name) of the check that is to be run
        parameter_name: the command parameter being checked (the name, not the value)
        parameter_value: the command parameter value being checked (the value, not the name)
        fail_response: the action that occurs if the check fails. The available options are as follows:
            (1) FAIL: a FAIL message is logged and the function returns FALSE for run_the_command Boolean
            (2) WARN: a WARN message is logged and the function returns TRUE for run_the_command Boolean
            (3) WARNBUTDONOTRUN: a WARN message is logged and the function returns FALSE for run_the_command Boolean
        other_values: an optional argument that allows a check to take in more than one parameter value;
            refer to the developer documentation for each individual check to determine if the other_values argument
            is used for that check.

    Returns:
        run_the_command: Boolean. If True, the check has determined that it is ok for the command to run. If False, the
        check has determined that it is not ok for the command to run.
    """

    # Boolean to determine if the check failed. Set to FALSE until the check FAILS.
    check_failed = False

    # Check if the attributes in a list exist in a GeoLayer based off of its attribute name.
    if condition.upper() == "DOATTRIBUTESEXIST":
        geolayer_id = other_values[0]

        # Get the GeoLayer.
        input_geolayer = self.command_processor.get_geolayer(geolayer_id)

        # Get the existing attribute names of the input GeoLayer.
        list_of_existing_attributes = input_geolayer.get_attribute_field_names()

        # Create a list of invalid input attribute names. An invalid attribute name is an input attribute name
        # that is not matching any of the existing attribute names of the GeoLayer.
        invalid_attrs = []
        for attr in parameter_value:
            if attr not in list_of_existing_attributes:
                invalid_attrs.append(attr)

        # The message is dependent on the invalid_attrs variable. Assign message AFTER the invalid_attrs variable
        # has been created.
        message = "The following attributes ({}) of the {} parameter do" \
                  " not exist within the GeoLayer ({}).".format(invalid_attrs, parameter_name, geolayer_id)
        recommendation = "Specify valid attribute names."

        # If there are invalid attributes, the check failed.
        if invalid_attrs:
            check_failed = True

    # Check if the parameter value (absolute file path) has a valid and existing folder.
    elif condition.upper() == "DOESFILEPATHHAVEAVALIDFOLDER":

        message = 'The folder of the {} ({}) is not a valid folder.'.format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid folder for the {} parameter.".format(
            parameter_name)

        output_folder = os.path.dirname(parameter_value)
        if not os.path.isdir(output_folder):
            check_failed = True

    # Check if the GeoLayer of the parameter value (GeoLayer ID) has the correct geometry type.
    elif condition.upper() == "DOESGEOLAYERIDHAVECORRECTGEOMETRY":
        desired_geom_type_list = [item.upper() for item in other_values[0]]

        message = 'The {} ({}) does not have geometry in the correct ' \
                  'format ({}).'.format(parameter_name, parameter_value, desired_geom_type_list)
        recommendation = 'Specify a GeoLayerID of a GeoLayer with geometry in' \
                         ' correct format ({}).'.format(desired_geom_type_list)

        if self.command_processor.get_geolayer(
                parameter_value).get_geometry().upper() not in desired_geom_type_list:
            check_failed = True

    # Check if the GeoLayer of the parameter value (GeoLayer ID) has a different CRS than another GeoLayer (referenced
    # by its GeoLayer ID)
    elif condition.upper() == "DOGEOLAYERIDSHAVEMATCHINGCRS":
        second_parameter_name = other_values[0]
        second_parameter_value = other_values[1]

        message = 'The {} ({}) and the {} ({}) do not have the same coordinate reference' \
                  ' system.'.format(parameter_name, parameter_value, second_parameter_name, second_parameter_value)
        recommendation = 'Specify GeoLayers that have the same coordinate reference system.'

        input_crs = self.command_processor.get_geolayer(
            parameter_value).get_crs()
        second_crs = self.command_processor.get_geolayer(
            second_parameter_value).get_crs()

        if not input_crs == second_crs:
            check_failed = True

    # Check if the parameter value (CRS code) is a valid CRS code usable in the QGIS environment.
    elif condition.upper() == "ISCRSCODEVALID":

        message = 'The {} ({}) is not a valid CRS code.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a valid CRS code (EPSG codes are an approved format).'

        if qgis_util.get_qgscoordinatereferencesystem_obj(
                parameter_value) is None:
            check_failed = True

    # Check if the parameter value (DataStoreID) is an existing DataStoreID.
    elif condition.upper() == "ISDATASTOREIDEXISTING":

        message = 'The {} ({}) is not a valid DataStore ID.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a valid DataStore ID.'

        if not self.command_processor.get_datastore(parameter_value):
            check_failed = True

    # Check if the parameter value (DataStore ID) is a unique DataStoreID.
    elif condition.upper() == "ISDATASTOREIDUNIQUE":

        message = 'The {} ({}) value is already in use as a DataStore ID.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a new {}.'.format(parameter_name)

        if self.command_processor.get_datastore(parameter_value):
            check_failed = True

            pv_IfDataStoreIDExists = self.get_parameter_value(
                "IfDataStoreIDExists", default_value="Replace")

            if pv_IfDataStoreIDExists.upper() == "REPLACEANDWARN":
                fail_response = "WARN"
            elif pv_IfDataStoreIDExists.upper() == "WARN":
                fail_response = "WARNBUTDONOTRUN"
            elif pv_IfDataStoreIDExists.upper() == "FAIL":
                fail_response = "FAIL"
            elif pv_IfDataStoreIDExists.upper() == "OPEN":
                check_failed = False
            elif pv_IfDataStoreIDExists.upper() == "REPLACE":
                check_failed = False

    # Check if the parameter value (Table Name) is unique within the DataStore.
    elif condition.upper() == "ISDATASTORETABLEUNIQUE":
        data_store_id = other_values[0]

        message = "The {} ({}) value is already an existing table in the {} DataStore.".format(
            parameter_name, parameter_value, data_store_id)
        recommendation = "Specify a unique {} value.".format(parameter_name)

        data_store_obj = self.command_processor.get_datastore(data_store_id)
        list_of_tables = data_store_obj.return_table_names()
        if parameter_value in list_of_tables:
            check_failed = True

    # Check if the parameter value (column name) is a valid column name of a delimited file.
    elif condition.upper() == "ISDELIMITEDFILECOLUMNNAMEVALID":

        delimited_file_abs = other_values[0]
        delimiter = other_values[1]

        message = "The {} ({}) is not a valid column name in the delimited file ({}).".format(
            parameter_name, parameter_value, delimited_file_abs)
        recommendation = "Specify an existing and valid {}.".format(
            parameter_name)

        if parameter_value not in io_util.get_col_names_from_delimited_file(
                delimited_file_abs, delimiter):
            check_failed = True

    # Check if the parameter value (sheet name) is a valid sheet name of an Excel file.
    elif condition.upper() == "ISEXCELSHEETNAMEVALID":

        excel_file_abs = other_values[0]

        message = "The {} ({}) is not a valid excel worksheet name in the excel file ({}).".format(
            parameter_name, parameter_value, excel_file_abs)
        recommendation = "Specify an existing and valid {}.".format(
            parameter_name)

        excel_workbook_obj = pandas_util.create_excel_workbook_obj(
            excel_file_abs)
        excel_worksheet_list = excel_workbook_obj.sheet_names

        if parameter_value not in excel_worksheet_list:
            check_failed = True

    # Check if the parameter value (feature class) is within a file geodatabase.
    elif condition.upper() == "ISFEATURECLASSINFGDB":

        file_gdb_path_abs = other_values[0]

        message = "The {} ({}) is not a valid feature class in the file geodatabase ({}).".format(
            parameter_name, parameter_value, file_gdb_path_abs)
        recommendation = "Specify an existing and valid {}.".format(
            parameter_name)
        ogr.UseExceptions()
        driver = ogr.GetDriverByName("OpenFileGDB")
        gdb = driver.Open(file_gdb_path_abs)
        feature_class_list = []
        for feature_class_idx in range(gdb.GetLayerCount()):
            feature_class = gdb.GetLayerByIndex(feature_class_idx)
            feature_class_list.append(feature_class.GetName())

        if parameter_value not in feature_class_list:
            check_failed = True

    # Check if the parameter value (absolute file path) is a valid and existing file.
    elif condition.upper() == "ISFILEPATHVALID":

        message = "The {} ({}) is not a valid file.".format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid file for the {} parameter.".format(
            parameter_name)

        if not os.path.isfile(parameter_value):
            check_failed = True

    # Check if the parameter value (absolute folder path) is a valid file geodatabase.
    elif condition.upper() == "ISFOLDERAFGDB":

        message = "The {} ({}) is not a valid file geodatabase.".format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid file geodatabase for the {} parameter.".format(
            parameter_name)

        ogr.UseExceptions()
        driver = ogr.GetDriverByName("OpenFileGDB")
        if driver.Open(parameter_value) is None:
            check_failed = True

    # Check if the parameter value (absolute folder path) is a valid and existing folder.
    elif condition.upper() == "ISFOLDERPATHVALID":

        message = "The {} ({}) is not a valid folder.".format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid folder for the {} parameter.".format(
            parameter_name)

        if not os.path.isdir(parameter_value):
            check_failed = True

    # Check if the parameter value (GeoLayerID) is an existing GeoLayerID.
    elif condition.upper() == "ISGEOLAYERIDEXISTING":

        message = 'The {} ({}) is not a valid GeoLayer ID.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a valid GeoLayer ID.'

        if not self.command_processor.get_geolayer(parameter_value):
            check_failed = True

    # Check if the parameter value (GeoLayer ID) is a unique GeoLayerID.
    elif condition.upper() == "ISGEOLAYERIDUNIQUE":

        message = 'The {} ({}) value is already in use as a GeoLayer ID.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a new {}.'.format(parameter_name)

        if self.command_processor.get_geolayer(parameter_value):
            check_failed = True
            pv_IfGeoLayerIDExists = self.get_parameter_value(
                "IfGeoLayerIDExists", default_value="Replace")

            if pv_IfGeoLayerIDExists.upper() == "REPLACEANDWARN":
                fail_response = "WARN"
            elif pv_IfGeoLayerIDExists.upper() == "WARN":
                fail_response = "WARNBUTDONOTRUN"
            elif pv_IfGeoLayerIDExists.upper() == "FAIL":
                fail_response = "FAIL"
            else:
                check_failed = False

    # Check if the parameter value (integer) falls within an inclusive range.
    elif condition.upper() == "ISINTBETWEENRANGE":
        int_min = other_values[0]
        int_max = other_values[1]

        message = 'The {} ({}) must be at or between {} & {}'.format(
            parameter_name, parameter_value, int_min, int_max)
        recommendation = 'Specify a valid {} value.'.format(parameter_name)

        if not validate_int_in_range(parameter_value, int_min, int_max, False,
                                     False):
            check_failed = True

    # Check if the length of a list is correct.
    elif condition.upper() == "ISLISTLENGTHCORRECT":
        delimiter = other_values[0]
        correct_length = other_values[1]

        message = 'The {} ({}) must have {} items.'.format(
            parameter_name, parameter_value, correct_length)
        recommendation = 'Specify a list of {} items for the {} parameter.'.format(
            correct_length, parameter_name)

        # Convert the string into a list.
        list_of_strings = string_util.delimited_string_to_list(
            parameter_value, delimiter)
        if len(list_of_strings) != correct_length:
            check_failed = True

    # Check if the property name is a unique property name
    elif condition.upper() == "ISPROPERTYUNIQUE":

        message = 'The {} ({}) value is already in use.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a new {}.'.format(parameter_name)

        if self.command_processor.get_property(parameter_value):
            check_failed = True
            pv_IfPropertyExists = self.get_parameter_value(
                "IfPropertyExists", default_value="Replace")

            if pv_IfPropertyExists.upper() == "REPLACEANDWARN":
                fail_response = "WARN"
            elif pv_IfPropertyExists.upper() == "WARN":
                fail_response = "WARNBUTDONOTRUN"
            elif pv_IfPropertyExists.upper() == "FAIL":
                fail_response = "FAIL"
            else:
                check_failed = False

    # Check if the input string is a valid QGSExpression.
    elif condition.upper() == "ISQGSEXPRESSIONVALID":

        message = "{} ({}) is not a valid QgsExpression.".format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid QgsExpression for {}.".format(
            parameter_name)

        if qgis_util.get_qgsexpression_obj(parameter_value) is None:
            check_failed = True

    # Check if the input string is the correct length.
    elif condition.upper() == "ISSTRINGLENGTHCORRECT":

        correct_length = other_values[0]

        message = 'The {} ({}) must have exactly {} character(s).'.format(
            parameter_name, parameter_value, correct_length)
        recommendation = 'Specify a string with {} characters for the {} parameter.'.format(
            correct_length, parameter_name)

        # Compare the string's length against the required length.
        if len(parameter_value) != correct_length:
            check_failed = True

    # Check if the parameter value (Table ID) is an existing Table ID.
    elif condition.upper() == "ISTABLEIDEXISTING":

        message = 'The {} ({}) is not a valid Table ID.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a valid Table ID.'

        if not self.command_processor.get_table(parameter_value):
            check_failed = True

    # Check if the parameter value (Table ID) is a unique TableID.
    elif condition.upper() == "ISTABLEIDUNIQUE":

        message = 'The {} ({}) value is already in use as a Table ID.'.format(
            parameter_name, parameter_value)
        recommendation = 'Specify a new {}.'.format(parameter_name)

        if self.command_processor.get_table(parameter_value):

            check_failed = True
            pv_IfTableIDExists = self.get_parameter_value(
                "IfTableIDExists", default_value="Replace")

            if pv_IfTableIDExists.upper() == "REPLACEANDWARN":
                fail_response = "WARN"
            elif pv_IfTableIDExists.upper() == "WARN":
                fail_response = "WARNBUTDONOTRUN"
            elif pv_IfTableIDExists.upper() == "FAIL":
                fail_response = "FAIL"
            else:
                check_failed = False

    # Check if the parameter value (Table Name) is a table within the DataStore.
    elif condition.upper() == "ISTABLEINDATASTORE":
        data_store_id = other_values[0]

        message = "{} ({}) is not an existing table in the {} DataStore.".format(
            parameter_name, parameter_value, data_store_id)
        recommendation = "Specify a valid {} value.".format(parameter_name)

        data_store_obj = self.command_processor.get_datastore(data_store_id)
        list_of_tables = data_store_obj.return_table_names()
        if parameter_value not in list_of_tables:
            check_failed = True

    # Check if the file is a valid tar file.
    elif condition.upper() == "ISTARFILE":

        message = "{} ({}) is not a valid TAR file.".format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid TAR file for {}.".format(
            parameter_name)

        if not zip_util.is_tar_file(parameter_value):
            check_failed = True

    # Check if the input string is a valid URL.
    elif condition.upper() == "ISURLVALID":

        message = "{} ({}) is not a valid URL.".format(parameter_name,
                                                       parameter_value)
        recommendation = "Specify a valid URL for {}.".format(parameter_name)

        try:
            urlopen(parameter_value)
        except Exception:
            check_failed = True

    # Check if the file is a valid zip file.
    elif condition.upper() == "ISZIPFILE":

        message = "{} ({}) is not a valid ZIP file.".format(
            parameter_name, parameter_value)
        recommendation = "Specify a valid ZIP file for {}.".format(
            parameter_name)

        if not zip_util.is_zip_file(parameter_value):
            check_failed = True

    else:

        message = "Check {} is not a valid check in the validators library.".format(
            condition)
        recommendation = "Contact the maintainers of the GeoProcessor software."
        check_failed = True
        fail_response = "FAIL"

    # If the check failed, increase the warning count of the command instance by one.
    if check_failed:
        self.warning_count += 1

        # If configured, log a FAILURE message about the failed check. Set the run_the_command boolean to False.
        if fail_response.upper() == "FAIL":
            self.logger.error(message)
            self.command_status.add_to_log(
                CommandPhaseType.RUN,
                CommandLogRecord(CommandStatusType.FAILURE, message,
                                 recommendation))
            run_the_command = False

        # If configured, log a WARNING message about the failed check. Set the run_the_command boolean to True.
        elif fail_response.upper() == "WARN":

            self.logger.warning(message)
            self.command_status.add_to_log(
                CommandPhaseType.RUN,
                CommandLogRecord(CommandStatusType.WARNING, message,
                                 recommendation))
            run_the_command = True

        # If configured, log a WARNING message about the failed check. Set the run_the_command boolean to False.
        elif fail_response.upper() == "WARNBUTDONOTRUN":

            self.logger.warning(message)
            self.command_status.add_to_log(
                CommandPhaseType.RUN,
                CommandLogRecord(CommandStatusType.WARNING, message,
                                 recommendation))
            run_the_command = False

    # If the check passed, set the run_the_command boolean to True.
    else:
        run_the_command = True

    # Return the run_the_command boolean.
    return run_the_command
Example #10
    def import_file(self, *args, **kwargs):
        """
        Loads data that has been uploaded into whatever format we need for serving.
        Expects kwarg "configuration_options", which is a list of dicts, one for each layer to import.
            Each dict must contain "upload_layer_id", referencing the UploadLayer being imported,
            and must contain "index", a 0-based index identifying which layer from the file is being referenced.
            It may also contain an optional "layer_name" to assign a custom name; "layer_name" may be ignored
            if it is already in use.
        """
        filename = self.file
        self.completed_layers = []
        err = GdalErrorHandler()
        gdal.PushErrorHandler(err.handler)
        gdal.UseExceptions()
        ogr.UseExceptions()
        configuration_options = kwargs.get('configuration_options', [{
            'index': 0
        }])
        # Configuration options should be a list at this point since the
        # importer can process multiple layers in a single import
        if isinstance(configuration_options, dict):
            configuration_options = [configuration_options]

        # Ensure that upload_layer_id exists in configuration for each layer
        nbad_config = 0
        for co in configuration_options:
            if 'upload_layer_id' not in co:
                nbad_config += 1

        if nbad_config > 0:
            msg = '{} of {} configs missing upload_layer_id'.format(
                nbad_config, len(configuration_options))
            logger.critical(msg)
            raise Exception(msg)

        # --- Resolve any disparity between automatically-assigned UploadLayer.layer_name and layer_name in
        # configuration options.
        # If layer_name is present in configuration_options either update UploadLayer.layer_name to match if it's unique
        #    or update configuration_options' 'layer_name' to match value in UploadLayer.layer_name if it's not unique.
        with db.transaction.atomic():
            upload_layer_ids = [
                co['upload_layer_id'] for co in configuration_options
            ]
            upload_layers = UploadLayer.objects.filter(id__in=upload_layer_ids)
            upload_layers_by_id = {ul.id: ul for ul in upload_layers}

            for co in configuration_options:
                ul = upload_layers_by_id[co['upload_layer_id']]
                if co.get('layer_name') is None:
                    co['layer_name'] = ul.layer_name
                elif co['layer_name'] != ul.layer_name:
                    if UploadLayer.objects.filter(
                            layer_name=co['layer_name']).exists():
                        co['layer_name'] = ul.layer_name
                    else:
                        ul.layer_name = co['layer_name']
                        ul.save()

        data, inspector = self.open_source_datastore(filename, *args, **kwargs)

        datastore_layers = inspector.describe_fields()

        if len(datastore_layers) == 0:
            logger.debug('No Dataset found')

        layers_info = []

        # It looks like this code allowed users to configure a portion of layers in the file by specifying an
        # index or a 'layer_name' option.  I'm not sure if lookups by 'layer_name' are still being used anywhere.
        # 'layer_name' now specifies the name to give to a layer on import to geonode.  If the previous
        # behavior is needed, add a 'internal_layer_name' value to the configuration options using the name
        # of the layer the file uses.
        lookup_fields = ['index', 'internal_layer_name']
        for layer_configuration in configuration_options:
            lookup_found = False
            for lf in lookup_fields:
                if lf in layer_configuration:
                    lookup_found = True
                    break

            if not lookup_found:
                logger.warning(
                    'No recognized layer lookup field provided in configuration options, should be one of {}'
                    .format(lookup_fields))
                continue

            for datastore_layer in datastore_layers:
                for lf in lookup_fields:
                    if (lf in datastore_layer and lf in layer_configuration
                            and datastore_layer.get(lf)
                            == layer_configuration.get(lf)):
                        # This update will overwrite the layer_name passed in configuration_options, stash the
                        #    intended name so we can correct it.
                        msg = 'Will configure layer from file {} identified by field "{}" with value {}'\
                                  .format(self.file, lf, layer_configuration[lf])
                        logger.info(msg)
                        intended_layer_name = layer_configuration.get(
                            'layer_name')
                        layer_configuration.update(datastore_layer)
                        if intended_layer_name:
                            layer_configuration.update(
                                {'layer_name': intended_layer_name})
                        else:
                            msg = (
                                'layer_name not provided in configuration options, will use name provided '
                                'by inspector which will likely lead to name collisions'
                            )
                            logger.warning(msg)

                        layers_info.append(layer_configuration)

        for layer_options in layers_info:
            if layer_options['layer_type'] == 'tile' and layer_options.get(
                    'driver', '').lower() == 'gpkg':
                # No special processing is needed on import, the only thing needed is a copy of the
                #    file which was made on upload.  Config for publishing is done
                #    in handlers.mapproxy.publish_handler.MapProxyGPKGTilePublishHandler
                self.completed_layers.append(
                    [layer_options['layer_name'], layer_options])
            elif layer_options['layer_type'] == 'raster':
                """
                File is a raster, we need to convert into optimized GeoTiff
                and skip any further testing or loading into target_store
                """
                #  Increment filename to make sure target doesn't exist
                filedir, filebase = os.path.split(filename)
                outfile = "{}/{}.tif".format(
                    filedir, layer_options['layer_name'].lower())
                fileout = increment_filename(
                    os.path.join(RASTER_FILES, outfile))
                raster_import(layer_options['path'], fileout)
                self.completed_layers.append([fileout, layer_options])
            elif layer_options['layer_type'] == 'vector':
                target_file, _ = self.open_target_datastore(self.target_store)
                target_create_options = []

                # Prevent numeric field overflow for shapefiles https://trac.osgeo.org/gdal/ticket/5241
                if target_file.GetDriver().GetName() == 'PostgreSQL':
                    target_create_options.append('PRECISION=NO')
                    os.environ["PGCLIENTENCODING"] = "UTF8"
                    # Hack for CSV ingest into postgres. When using COPY, OGR prepends a bad newline to each feature
                    if data.GetDriver().ShortName.lower() == 'csv':
                        os.environ["PG_USE_COPY"] = "false"
                    else:
                        os.environ["PG_USE_COPY"] = "true"

                layer_options['encoding'] = 'utf-8'
                # Read encoding from cpg file if exist
                cpg_file = "{}.cpg".format(os.path.splitext(filename)[0])

                if os.path.isfile(cpg_file):
                    _encoding = open(cpg_file).read()
                    _parts = _encoding.split()
                    if len(_parts) > 1:
                        # attempt to cover a case where encoding
                        # is similar to ANSI 1252 (cp1252)
                        _encoding = "cp{}".format(_parts[-1])

                    try:
                        codecs.lookup(_encoding)
                        layer_options['encoding'] = _encoding
                    except LookupError:
                        pass

                logger.debug('attribute encoding: {}'.format(
                    layer_options['encoding']))
                if data.GetDriver().ShortName.lower() == 'esri shapefile':
                    os.environ['SHAPE_ENCODING'] = layer_options['encoding']

                layer_options['modified_fields'] = {}
                layer = data.GetLayer(layer_options.get('index'))
                layer_name = layer_options['layer_name']
                layer_geom_type = self.get_layer_type(layer, data)
                srs = layer.GetSpatialRef()

                # default the layer to 4326 if a spatial reference is not provided
                if not srs:
                    srs = osr.SpatialReference()
                    srs.ImportFromEPSG(4326)

                # pass the srs authority code to handlers
                if srs.AutoIdentifyEPSG() == 0:
                    layer_options['srs'] = '{0}:{1}'.format(
                        srs.GetAuthorityName(None), srs.GetAuthorityCode(None))
                else:
                    # layer_options['srs'] = convert_wkt_to_epsg(srs.ExportToWkt())
                    layer_ids = []
                    for configuration_option in configuration_options:
                        layer_ids = [configuration_option['upload_layer_id']]
                    layer_id = layer_ids[0]
                    layer_path = os.path.dirname(filename)
                    original_layer_name = layer.GetName()
                    layer_options['srs'] = reproject_coordinate_system(
                        original_layer_name, layer_name, layer, layer_path)
                    data, inspector = self.open_source_datastore(
                        filename, *args, **kwargs)
                    target_file, _ = self.open_target_datastore(
                        self.target_store)
                    layer = data.GetLayer(layer_options.get('index'))
                    srs = layer.GetSpatialRef()

                logger.info('Creating dataset "{}" from file "{}"'.format(
                    layer_name, target_file))
                target_layer, created = self.get_or_create_target_dataset(
                    target_file,
                    str(layer_name),
                    srs,
                    layer_geom_type,
                    options=target_create_options)

                if not created:
                    # if the layer wasn't created, there's no need for
                    # further processing; let's just return it. This could happen
                    # if the user is retrying a previously failed import
                    self.completed_layers.append(
                        [target_layer.GetName(), layer_options])
                    return self.completed_layers

                # adding fields to new layer
                layer_definition = ogr.Feature(layer.GetLayerDefn())
                source_fid = None

                wkb_field = 0

                for i in range(layer_definition.GetFieldCount()):

                    field_def = layer_definition.GetFieldDefnRef(i)
                    field_name = field_def.GetName()
                    try:
                        field_name = field_name.decode('utf-8')
                    except UnicodeDecodeError as e:
                        logger.error('Error Decoding {} - {}'.format(
                            field_name, str(e)))
                        field_def.SetName(
                            str(field_name.decode('utf-8', 'ignore')))

                    if field_def.GetName() == target_layer.GetFIDColumn(
                    ) and field_def.GetType() != 0:
                        field_def.SetType(0)  # 0 == ogr.OFTInteger

                    if field_def.GetName() != 'wkb_geometry':
                        target_layer.CreateField(field_def)
                        new_name = target_layer.GetLayerDefn().GetFieldDefn(
                            i - wkb_field).GetName()
                        old_name = field_def.GetName()

                        if new_name != old_name:
                            layer_options['modified_fields'][
                                old_name] = new_name

                        if old_name == target_layer.GetFIDColumn(
                        ) and not layer.GetFIDColumn():
                            source_fid = i
                    else:
                        wkb_field = 1

                if wkb_field != 0:
                    layer.SetIgnoredFields(['wkb_geometry'])

                for feature in layer:
                    if feature and feature.geometry():

                        if not layer.GetFIDColumn():
                            feature.SetFID(-1)

                        if feature.geometry().GetGeometryType() != target_layer.GetGeomType() and \
                                target_layer.GetGeomType() in range(4, 7):

                            if target_layer.GetGeomType() == 5:  # wkbMultiLineString
                                conversion_function = ogr.ForceToMultiLineString
                            elif target_layer.GetGeomType() == 4:  # wkbMultiPoint
                                conversion_function = ogr.ForceToMultiPoint
                            else:
                                conversion_function = ogr.ForceToMultiPolygon

                            geom = ogr.CreateGeometryFromWkb(
                                feature.geometry().ExportToWkb())
                            feature.SetGeometry(conversion_function(geom))

                        if source_fid is not None:
                            feature.SetFID(feature.GetField(source_fid))

                        # Force encoding for all text fields
                        for field in range(0, feature.GetFieldCount()):
                            if feature.GetFieldType(field) == ogr.OFTString:
                                fieldstr = feature.GetField(field)
                                # First try a strict decode using the configured encoding (fallbacks below)
                                try:
                                    decodedfield = fieldstr.decode(
                                        layer_options['encoding'],
                                        errors='strict')
                                except UnicodeDecodeError:
                                    decodedfield = fieldstr.decode(
                                        errors='ignore')
                                except AttributeError:
                                    continue
                                feature.SetField(field, decodedfield)
                        target_layer.CreateFeature(feature)
                layer.ResetReading()
                self.completed_layers.append(
                    [target_layer.GetName(), layer_options])
            else:
                msg = 'Unexpected layer type: "{}"'.format(
                    layer_options['layer_type'])
                logger.error(msg)
                raise Exception(msg)

        return self.completed_layers
Example #11
def prepare_stream(input_stream_dataset, copy_stream_dataset, stream_id_column,
                   tolerance):
    ogr.UseExceptions()

    global printer
    if not printer:
        printer = PassPrint()

    try:
        printer.msg("Opening dataset..")
        input_streams_ds = feature_utils.getFeatureDataset(
            input_stream_dataset)
        input_streams_layer = input_streams_ds.GetLayer()

        streams_ds = feature_utils.copyFeatureDatasetAsEmpty(
            input_streams_ds,
            output_path=copy_stream_dataset,
            overwrite=True,
            new_geom_type=ogr.wkbLineString)
        streams_layer = streams_ds.GetLayer()
        streams_defn = streams_layer.GetLayerDefn()

        # check for stream id column
        printer.msg("Checking attributes..")
        input_fields = feature_utils.getFields(input_streams_layer)
        input_sid_field = None
        for field in input_fields:
            if field['name'].lower() == stream_id_column.lower():
                input_sid_field = field
                break
        if not input_sid_field:
            printer.warn("  stream ID column not found, creating..")
            feature_utils.createFieldDefinition(stream_id_column, int)
        fields = feature_utils.getFields(streams_layer)
        field_sid = None
        for f in fields:
            if f['name'] == stream_id_column:
                field_sid = f
                break

        printer.msg("Copying features..")
        _copy_feature(input_streams_layer, streams_layer, streams_defn,
                      input_fields, input_sid_field, stream_id_column)

        input_streams_layer = None
        input_streams_ds = None

        # printer.msg("Snapping endpoints..")
        # new_features = _snap_endpoints(streams_layer, streams_defn, fields)
        # if len(new_features):
        #     printer.msg("  adding split features..")
        #     streams_ds, streams_layer, streams_defn = _replace_and_update(streams_ds, copy_stream_dataset, new_features)

        printer.msg("Checking intersections..")
        new_features = _split_intersections(streams_layer, streams_defn,
                                            fields, field_sid)
        if len(new_features):
            printer.msg("  adding split features..")
            streams_ds, streams_layer, streams_defn = _replace_and_update(
                streams_ds, copy_stream_dataset, new_features)

        printer.msg("Assigning new stream IDs..")
        _assign_stream_ids(streams_layer, field_sid)

    except Exception:
        input_streams_layer = None
        input_streams_ds = None
        streams_layer = None
        streams_defn = None
        streams_ds = None
        raise
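A minimal call sketch (paths, column name, and tolerance value are hypothetical):

    prepare_stream('streams.shp', 'streams_prepared.shp', 'stream_id', 1.0)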
Example #12
def create_ogr_download(sql_url, data, table_metadata, valid_geo_ids,
                        file_ident, out_filename, format):
    import ogr
    import osr
    format_info = supported_formats[format]
    driver_name = format_info['driver']
    ogr.UseExceptions()
    in_driver = ogr.GetDriverByName("PostgreSQL")
    host, user, password, database = get_sql_config(sql_url)
    conn = in_driver.Open("PG: host=%s dbname=%s user=%s password=%s" %
                          (host, database, user, password))

    if conn is None:
        raise Exception("Could not connect to database to generate download.")

    out_driver = ogr.GetDriverByName(driver_name)
    out_srs = osr.SpatialReference()
    out_srs.ImportFromEPSG(4326)
    out_data = out_driver.CreateDataSource(out_filename)
    # See http://gis.stackexchange.com/questions/53920/ogr-createlayer-returns-typeerror
    out_layer = out_data.CreateLayer(file_ident.encode('utf-8'),
                                     srs=out_srs,
                                     geom_type=ogr.wkbMultiPolygon)
    out_layer.CreateField(ogr.FieldDefn('geoid', ogr.OFTString))
    out_layer.CreateField(ogr.FieldDefn('name', ogr.OFTString))
    for (table_id, table) in table_metadata.items():
        for column_id, column_info in table['columns'].items():
            column_name_utf8 = column_id.encode('utf-8')
            if driver_name == "ESRI Shapefile":
                # Work around the Shapefile column name length limits
                out_layer.CreateField(
                    ogr.FieldDefn(column_name_utf8, ogr.OFTReal))
                out_layer.CreateField(
                    ogr.FieldDefn(column_name_utf8 + "e", ogr.OFTReal))
            else:
                out_layer.CreateField(
                    ogr.FieldDefn(column_name_utf8, ogr.OFTReal))
                out_layer.CreateField(
                    ogr.FieldDefn(column_name_utf8 + ", Error", ogr.OFTReal))

    # this SQL is echoed in the Excel export, but there it has no geom, so it is copied here instead of factored out
    sql = """SELECT geom,full_geoid,display_name
             FROM tiger2014.census_name_lookup
             WHERE full_geoid IN (%s)
             ORDER BY full_geoid""" % ', '.join("'%s'" % g.encode('utf-8')
                                                for g in valid_geo_ids)
    in_layer = conn.ExecuteSQL(sql)

    in_feat = in_layer.GetNextFeature()
    while in_feat is not None:
        out_feat = ogr.Feature(out_layer.GetLayerDefn())
        if format in ('shp', 'kml', 'geojson'):
            out_feat.SetGeometry(in_feat.GetGeometryRef())
        geoid = in_feat.GetField('full_geoid')
        out_feat.SetField('geoid', geoid)
        out_feat.SetField('name', in_feat.GetField('display_name'))
        for (table_id, table) in table_metadata.items():
            table_estimates = data[geoid][table_id]['estimate']
            table_errors = data[geoid][table_id]['error']
            for column_id, column_info in table['columns'].items():
                column_name_utf8 = column_id.encode('utf-8')
                if column_id in table_estimates:
                    if format == 'shp':
                        # Work around the Shapefile column name length limits
                        estimate_col_name = column_name_utf8
                        error_col_name = column_name_utf8 + "e"
                    else:
                        estimate_col_name = column_name_utf8
                        error_col_name = column_name_utf8 + ", Error"

                    out_feat.SetField(estimate_col_name,
                                      table_estimates[column_id])
                    out_feat.SetField(error_col_name, table_errors[column_id])

        out_layer.CreateFeature(out_feat)
        in_feat.Destroy()
        in_feat = in_layer.GetNextFeature()
    out_data.Destroy()
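For reference, the nested structure this function reads from data, inferred from the access pattern above (keys and values here are hypothetical):

    data = {
        '05000US17031': {                        # geoid
            'B01001': {                          # table_id
                'estimate': {'B01001001': 5194675.0},
                'error': {'B01001001': 361.0},
            },
        },
    }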
Example #13
File: network.py  Project: krm75/arcGNAT
    def _nx_to_shp(self, G, out_shp, bool_node):
        """
        This is a re-purposing of the NetworkX write_shp module with some minor changes.
        :param G: networkx directional graph
        :param out_dir: directory where output shapefiles will be written
        """

        # easier to debug in python if ogr throws exceptions
        ogr.UseExceptions()

        # set spatial reference for output shapefile
        srs = osr.SpatialReference()
        srs.ImportFromWkt(self.srs)

        def netgeometry(key, data):
            if 'Wkb' in data:
                geom = ogr.CreateGeometryFromWkb(data['Wkb'])
            elif 'Wkt' in data:
                geom = ogr.CreateGeometryFromWkt(data['Wkt'])
            elif type(
                    key[0]).__name__ == 'tuple':  # edge keys are packed tuples
                geom = ogr.Geometry(ogr.wkbLineString)
                _from, _to = key[0], key[1]
                try:
                    geom.SetPoint(0, *_from)
                    geom.SetPoint(1, *_to)
                except TypeError:
                    # assume user used tuple of int and choked ogr
                    _ffrom = [float(x) for x in _from]
                    _fto = [float(x) for x in _to]
                    geom.SetPoint(0, *_ffrom)
                    geom.SetPoint(1, *_fto)
            else:
                geom = ogr.Geometry(ogr.wkbPoint)
                try:
                    geom.SetPoint(0, *key)
                except TypeError:
                    # assume user used tuple of int and choked ogr
                    fkey = [float(x) for x in key]
                    geom.SetPoint(0, *fkey)

            return geom

        # Create_feature with new optional attributes arg (should be dict type)
        def create_feature(geometry, lyr, attributes=None):
            feature = ogr.Feature(lyr.GetLayerDefn())
            feature.SetGeometry(geometry)
            if attributes is not None:
                # Loop through attributes, assigning data to each field
                for field, data in attributes.items():
                    feature.SetField(field, data)
            lyr.CreateFeature(feature)
            feature.Destroy()

        def build_attrb_dict(self, lyr, data, fields):
            from collections import OrderedDict
            attributes = OrderedDict()
            ordered_data = order_attributes(self, data)
            # Loop through attribute data in edges dictionary
            for key, data in ordered_data.items():
                # Reject spatial data not required for attribute table
                if (key != 'Json' and key != 'Wkt' and key != 'Wkb'
                        and key != 'ShpName'):
                    # For all edges check/add field and data type to fields dict
                    if key not in fields:
                        # Field not in previous edges so add to dict
                        if type(data) in OGRTypes:
                            fields[key] = OGRTypes[type(data)]
                        else:
                            # Data type not supported, default to string (char 80)
                            fields[key] = ogr.OFTString
                        newfield = ogr.FieldDefn(key, fields[key])
                        lyr.CreateField(newfield)
                        # Store the data from new field to dict for CreateLayer()
                        attributes[key] = data
                    else:
                        # Field already exists, add data to dict for CreateLayer()
                        attributes[key] = data
            return attributes

        def order_attributes(self, attrb_dict):
            # order dictionary attributes into list based on field list from input shapefile
            from collections import OrderedDict
            ordered_attrb = OrderedDict()
            for f in self.fields:
                for k, v in attrb_dict.iteritems():
                    if k == f:
                        ordered_attrb.update({k: v})
            for k, v in attrb_dict.iteritems():
                if k.startswith('_') and k.endswith('_'):
                    ordered_attrb.update({k: v})
            return ordered_attrb

        # Set up output shapefiles
        base_name = os.path.basename(out_shp)
        shp_name = os.path.splitext(base_name)[0]
        shp_name = shp_name.encode('utf-8')
        dir_name = os.path.dirname(out_shp)
        node_name = "{}_nodes".format(shp_name)
        edge_name = "{}_edges".format(shp_name)
        drv = ogr.GetDriverByName("ESRI Shapefile")
        shpdir = drv.CreateDataSource(dir_name)

        # Conversion dict between python and ogr types
        OGRTypes = {
            int: ogr.OFTInteger,
            str: ogr.OFTString,
            float: ogr.OFTReal
        }

        # Write nodes
        if bool_node:
            try:
                shpdir.DeleteLayer(node_name)
            except Exception:
                pass  # the node layer did not exist yet
            nodes = shpdir.CreateLayer(node_name, srs, ogr.wkbPoint)
            # Node attribute write support merged into node loop
            n_fields = {}  # storage for field names and their data types

            # Node loop
            for n in G:
                data = G.node[n]
                g = netgeometry(n, data)
                n_attributes = build_attrb_dict(self, nodes, data, n_fields)
                create_feature(g, nodes, n_attributes)
            nodes = None

        # Write edges
        try:
            shpdir.DeleteLayer(shp_name)
        except Exception:
            pass  # the edge layer did not exist yet

        edges = shpdir.CreateLayer(shp_name, srs, ogr.wkbLineString)
        # New edge attribute write support merged into edge loop
        e_fields = {}  # storage for field names and their data types

        # Edge loop
        for u, v, k, data in G.edges_iter(data=True, keys=True):
            g = netgeometry(k, data)
            e_attributes = build_attrb_dict(self, edges, data, e_fields)
            # Create the feature with geometry, passing new attribute data
            create_feature(g, edges, e_attributes)
        edges = None
        return
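
When an edge carries no Wkb/Wkt attribute, netgeometry() above falls back to
building a two-point line directly from the edge key's coordinate tuples. A
minimal standalone sketch of that fallback (the coordinates are made up):

from osgeo import ogr

edge_key = ((595300.0, 4920125.0), (595410.0, 4920260.0))  # hypothetical endpoints
geom = ogr.Geometry(ogr.wkbLineString)
geom.SetPoint(0, *edge_key[0])
geom.SetPoint(1, *edge_key[1])
print(geom.ExportToWkt())  # a two-point LINESTRING between the endpoints
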
Example #14
def getCatchmentFeaturesForReaches(config,
                                   outputDir,
                                   catchmentFilename,
                                   reaches,
                                   format=OGR_SHAPEFILE_DRIVER_NAME):
    """ Get features (in WGS 84) for the drainage area associated with a
        set of NHD (National Hydrography Dataset) stream reaches.
        
        @param config A Python ConfigParser containing the following
        sections and options:
            'PATH_OF_NHDPLUS2_CATCHMENT' (absolute path to
            NHD catchment shapefile)
        @param outputDir String representing the absolute/relative
        path of the directory into which output rasters should be
        written
        @param catchmentFilename String representing name of file to
        save catchment features to.  The appropriate extension will be added to the file name
        @param reaches List representing catchment features to be output
        @param format String representing OGR driver to use
        
        @return String representing the name of the dataset in outputDir created to hold
        the features
         
        @raise ConfigParser.NoSectionError
        @raise ConfigParser.NoOptionError
        @raise IOError(errno.ENOTDIR) if outputDir is not a directory
        @raise IOError(errno.EACCES) if outputDir is not writable
        @raise Exception if output format is not known
        
        @todo Detect and fix non-closed geometries, e.g.
        kalisti:archive miles$ ./GetCatchmentsForComidsSP.py -p test -c 10462287
        Traceback (most recent call last):
          File "./GetCatchmentsForComidsSP.py", line 29, in <module>
            catchmentFilename, comid)
          File "/Users/miles/Dropbox/EarthCube-Multilayered/RHESSys-workflow/eclipse/EcohydroWorkflowLib/ecohydrolib/nhdplus2/networkanalysis.py", line 506, in getCatchmentFeaturesForComid
            outGeom = outGeom.Union( inGeom )
          File "/usr/local/Cellar/python/2.7.5/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/osgeo/ogr.py", line 4065, in Union
            return _ogr.Geometry_Union(self, *args)
        RuntimeError: TopologyException: found non-noded intersection between LINESTRING (-77.9145 37.0768, -77.9147 37.0768) and LINESTRING (-77.9147 37.0768, -77.9145 37.0768) at -77.914621661942761 37.076822779115943
    
    """
    catchmentFeatureDBPath = config.get('NHDPLUS2',
                                        'PATH_OF_NHDPLUS2_CATCHMENT')
    if not os.access(catchmentFeatureDBPath, os.R_OK):
        raise IOError(
            errno.EACCES, "The catchment feature DB at %s is not readable" %
            catchmentFeatureDBPath)
    catchmentFeatureDBPath = os.path.abspath(catchmentFeatureDBPath)

    if not os.path.isdir(outputDir):
        raise IOError(errno.ENOTDIR,
                      "Output directory %s is not a directory" % (outputDir, ))
    if not os.access(outputDir, os.W_OK):
        raise IOError(
            errno.EACCES,
            "Not allowed to write to output directory %s" % (outputDir, ))
    outputDir = os.path.abspath(outputDir)

    if format not in OGR_DRIVERS:
        raise Exception("Output format '%s' is not known" % (format, ))

    catchmentFilename = "%s%s%s" % (catchmentFilename, os.extsep,
                                    OGR_DRIVERS[format])
    catchmentFilepath = os.path.join(outputDir, catchmentFilename)

    # Open input layer
    ogr.UseExceptions()
    poDS = ogr.Open(catchmentFeatureDBPath, OGR_UPDATE_MODE)
    if not poDS:
        raise Exception("Unable to open catchment feature database %s" %
                        (catchmentFeatureDBPath, ))
    assert (poDS.GetLayerCount() > 0)
    poLayer = poDS.GetLayer(0)
    assert (poLayer)

    # Create output data source
    poDriver = ogr.GetDriverByName(format)
    assert (poDriver)
    poODS = poDriver.CreateDataSource(catchmentFilepath)
    assert (poODS is not None)
    #    poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(), poLayer.GetGeomType())
    poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(),
                                 ogr.wkbMultiPolygon)
    #    poOLayer = poODS.CreateLayer("catchment", poLayer.GetSpatialRef(), ogr.wkbPolygon )

    # Create fields in output layer
    layerDefn = poLayer.GetLayerDefn()
    for i in range(layerDefn.GetFieldCount()):
        poOLayer.CreateField(layerDefn.GetFieldDefn(i))

    # Create single geometry to hold catchment polygon in output shapefile
    outGeom = ogr.Geometry(poOLayer.GetGeomType())
    #    polygon = Polygon()

    # Copy features, unioning them as we go
    numReaches = len(reaches)
    # Copy features in batches of UPSTREAM_SEARCH_THRESHOLD to overcome limit in
    #   OGR driver for input layer
    start = 0
    end = UPSTREAM_SEARCH_THRESHOLD
    while end < numReaches:
        whereFilter = "featureid=%s" % (reaches[start], )
        for reach in reaches[start + 1:end]:
            whereFilter = whereFilter + " OR featureid=%s" % (reach, )
        # Copy features
        assert (poLayer.SetAttributeFilter(whereFilter) == 0)
        inFeature = poLayer.GetNextFeature()
        # Union geometry of input feature to output feature
        while inFeature:
            #            inGeom = inFeature.GetGeometryRef().SimplifyPreserveTopology(0.0001)
            inGeom = inFeature.GetGeometryRef()
            outGeom = outGeom.Union(inGeom)
            #            polygon = polygon.union( loads( inGeom.ExportToWkb() ) )
            #            polygon = cascaded_union( [polygon, loads( inGeom.ExportToWkb() )] )
            inFeature.Destroy()
            inFeature = poLayer.GetNextFeature()
        start = end
        end = end + UPSTREAM_SEARCH_THRESHOLD
    # Copy remaining features
    whereFilter = "featureid=%s" % (reaches[start], )
    for reach in reaches[start + 1:end]:
        whereFilter = whereFilter + " OR featureid=%s" % (reach, )
    # Copy features
    assert (poLayer.SetAttributeFilter(whereFilter) == 0)
    inFeature = poLayer.GetNextFeature()
    while inFeature:
        #        inGeom = inFeature.GetGeometryRef().SimplifyPreserveTopology(0.0001)
        inGeom = inFeature.GetGeometryRef()
        outGeom = outGeom.Union(inGeom)
        #        polygon = polygon.union( loads( inGeom.ExportToWkb() ) )
        #        polygon = cascaded_union( [polygon, loads( inGeom.ExportToWkb() )] )
        inFeature.Destroy()
        inFeature = poLayer.GetNextFeature()

    # Create a new polygon that only contains exterior points
    outGeom = ogr.ForceToPolygon(outGeom)
    polygon = loads(outGeom.ExportToWkb())
    if polygon.exterior:
        coords = polygon.exterior.coords
        newPolygon = Polygon(coords)
    else:
        newPolygon = Polygon()

    # Write new feature to output feature data source
    outFeat = ogr.Feature(poOLayer.GetLayerDefn())
    outFeat.SetGeometry(ogr.CreateGeometryFromWkb(dumps(newPolygon)))
    poOLayer.CreateFeature(outFeat)

    return catchmentFilename
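
The two filter-building passes above exist because reach ids are OR-ed together
in batches of UPSTREAM_SEARCH_THRESHOLD, so no single attribute filter exceeds
the input driver's limits. A minimal sketch of just the batching pattern (the
ids and threshold value are illustrative):

reaches = [10462287, 10462291, 10462305, 10462311, 10462319]
UPSTREAM_SEARCH_THRESHOLD = 2  # illustrative; the real value comes from the module
for start in range(0, len(reaches), UPSTREAM_SEARCH_THRESHOLD):
    batch = reaches[start:start + UPSTREAM_SEARCH_THRESHOLD]
    where_filter = " OR ".join("featureid=%s" % r for r in batch)
    print(where_filter)
    # poLayer.SetAttributeFilter(where_filter) would then be applied per batch
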
Example #15
    def checkgeometry(self, layers):
        try:
            if self.ds is None:
                return layers
            """
            layerGeometry = self._getLayerGeometry()
            gt = self.ds.GetLayer().GetGeomType()
            if layerGeometry:
                if not (
                (layerGeometry== "P" and gt ==1)
                or
                (layerGeometry== "L" and gt ==2)
                or
                (layerGeometry== "A" and gt ==3)):
                # check feature by feature
                    self.__checkGeometryByFeature(layerGeometry)
            """

            #check if the layer is within the AOI

            # Create a Polygon from the extent of the layer
            aoi_geometry = None
            for layer_object in layers['AOI']['LayerObject']:
                aoi_geometry = shapely.wkt.loads(
                    layer_object.geometry().ExportToIsoWkt())
            ogr.UseExceptions()
            #aoi_extent = self.extent_calculation(aoi['AOI']['GeometryObject'].GetLayer().GetExtent())
            for key in layers:
                #layer_extent = self.extent_calculation(aoi[key]['GeometryObject']
                feature_out = 0
                for layer_object in layers[key]['LayerObject']:
                    lyr_shapely = shapely.wkt.loads(
                        layer_object.geometry().ExportToIsoWkt())
                    if not lyr_shapely.within(aoi_geometry):
                        feature_out += 1
                        if feature_out > 100:
                            break

                if feature_out > 0:
                    #print(key)
                    err = self.conf_param["logsText"]["GDB"]["geometryName"][
                        "not_in_AOI"].copy()
                    initial_err = self.activ_code + "|" + CommonFunctions.split_root(
                        self, self.root, self.activ_code
                    ) + "|" + key + "|" + self.logFile.getCatValue(
                        self.conf_param['VectorFormats'][self.type]
                        ['not_equal_AOI']) + "|" + self.logFile.getIssueValue(
                            self.conf_param['VectorFormats'][
                                self.type]['not_equal_AOI']) + "|"
                    err.insert(0, initial_err)
                    err.insert(2, key)
                    numFeat = str(feature_out)
                    if feature_out > 100:
                        numFeat = " MORE THAN 100 "
                    err.insert(4, numFeat + " ")
                    self.logFile.writelogs(err)
        except Exception as ex:
            print(ex)
        """