Example no. 1
0
def ogr_factory_3():
    """Test ogr.ForceToMultiPolygon on a plain polygon and on a
    geometry collection wrapping a polygon.

    Returns 'success' when both conversions match the expected WKT,
    'fail' (after printing the offending geometry) otherwise.
    """

    # (input WKT, expected WKT after ForceToMultiPolygon)
    cases = [
        ('POLYGON((0 0,100 0,100 100,0 0))',
         'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'),
        ('GEOMETRYCOLLECTION(POLYGON((0 0,100 0,100 100,0 0)))',
         'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'),
    ]

    for src_wkt, exp_wkt in cases:
        src_geom = ogr.CreateGeometryFromWkt(src_wkt)
        dst_geom = ogr.ForceToMultiPolygon(src_geom)

        # check_feature_geometry returns non-zero on mismatch
        if ogrtest.check_feature_geometry(dst_geom, exp_wkt):
            print(dst_geom.ExportToWkt())
            return 'fail'

    return 'success'
Example no. 2
0
def ogr_factory_6():
    """Run every ogr.ForceTo* converter over a battery of degenerate
    inputs (None, EMPTY variants, under-specified geometries).

    This is a pure crash test: the converted geometries are discarded;
    success only means no converter blew up.
    """

    wkt_inputs = (
        None,
        'POINT EMPTY',
        'LINESTRING EMPTY',
        'POLYGON EMPTY',
        'MULTIPOINT EMPTY',
        'MULTILINESTRING EMPTY',
        'MULTIPOLYGON EMPTY',
        'GEOMETRYCOLLECTION EMPTY',
        'POINT(0 0)',
        'LINESTRING(0 0)',
        'POLYGON((0 0))',
        'POLYGON(EMPTY,(0 0),EMPTY,(1 1))',
        'MULTIPOINT(EMPTY,(0 0),EMPTY,(1 1))',
        'MULTILINESTRING(EMPTY,(0 0),EMPTY,(1 1))',
        'MULTIPOLYGON(((0 0),EMPTY,(1 1)),EMPTY,((2 2)))',
        'GEOMETRYCOLLECTION(POINT EMPTY)',
        'GEOMETRYCOLLECTION(LINESTRING EMPTY)',
        'GEOMETRYCOLLECTION(POLYGON EMPTY)',
        'GEOMETRYCOLLECTION(MULTIPOINT EMPTY)',
        'GEOMETRYCOLLECTION(MULTILINESTRING EMPTY)',
        'GEOMETRYCOLLECTION(MULTIPOLYGON EMPTY)',
        'GEOMETRYCOLLECTION(GEOMETRYCOLLECTION EMPTY)',
        'GEOMETRYCOLLECTION(POINT(0 0))',
        'GEOMETRYCOLLECTION(LINESTRING(0 0),LINESTRING(1 1))',
        'GEOMETRYCOLLECTION(POLYGON((0 0),EMPTY,(2 2)), POLYGON((1 1)))',
    )

    for wkt in wkt_inputs:
        geom = None if wkt is None else ogr.CreateGeometryFromWkt(wkt)
        # Results intentionally unused — we only care that nothing crashes.
        ogr.ForceToPolygon(geom)
        ogr.ForceToMultiPolygon(geom)
        ogr.ForceToMultiPoint(geom)
        ogr.ForceToMultiLineString(geom)
        ogr.ForceToLineString(geom)

    return 'success'
Example no. 3
0
def kmz_converter():
    """Convert the KMZ file named in sys.argv[1] into up to three ESRI
    shapefiles — one each for points, lines and polygons.

    Every input feature is promoted to its multi-geometry type,
    flattened to 2D, and written to the shapefile matching its geometry
    family; unsupported geometry types are skipped.  Output shapefiles
    that end up empty are deleted.  Relies on helpers defined elsewhere
    in this module (open_kmz, set_output_filename,
    create_output_datastore, create_output_layer, add_fields).
    """
    # open the input KMZ file
    kmz_file = str(sys.argv[1])
    data_source = open_kmz(kmz_file)

    # create the output shapefiles
    points_shp_name = set_output_filename(kmz_file, 'points')
    lines_shp_name = set_output_filename(kmz_file, 'lines')
    polygons_shp_name = set_output_filename(kmz_file, 'polygons')

    points_datastore = create_output_datastore(points_shp_name)
    points_layer = create_output_layer(points_datastore, ogr.wkbMultiPoint)
    add_fields(points_layer)

    lines_datastore = create_output_datastore(lines_shp_name)
    lines_layer = create_output_layer(lines_datastore, ogr.wkbMultiLineString)
    add_fields(lines_layer)

    polygons_datastore = create_output_datastore(polygons_shp_name)
    polygons_layer = create_output_layer(polygons_datastore,
                                         ogr.wkbMultiPolygon)
    add_fields(polygons_layer)

    # attributes copied from input features when present (loop-invariant,
    # so hoisted out of the feature loop)
    field_names = ('Name', 'descriptio', 'icon', 'snippet')

    # loop through the layers
    feature_counter = 0
    points_counter = 0
    lines_counter = 0
    polygons_counter = 0

    layer_count = data_source.GetLayerCount()
    for i in range(layer_count):
        layer = data_source.GetLayer(i)
        layer_info = {
            'feature_count': layer.GetFeatureCount(),
            'name': layer.GetName(),
        }

        # loop through the features in each layer
        for feature in layer:
            feature_counter += 1
            geom = feature.GetGeometryRef()
            geom_type = geom.GetGeometryName()

            # dispatch once on geometry family (the original tested the
            # type a second time when writing; one dispatch suffices)
            if geom_type in ('POINT', 'MULTIPOINT'):
                points_counter += 1
                out_layer = points_layer
                out_geom = ogr.ForceToMultiPoint(geom)

            elif geom_type in ('LINESTRING', 'MULTILINESTRING'):
                lines_counter += 1
                out_layer = lines_layer
                out_geom = ogr.ForceToMultiLineString(geom)

            elif geom_type in ('POLYGON', 'MULTIPOLYGON'):
                polygons_counter += 1
                out_layer = polygons_layer
                out_geom = ogr.ForceToMultiPolygon(geom)

            else:
                # unsupported geometry type — skip the feature
                continue

            out_feature = ogr.Feature(out_layer.GetLayerDefn())

            # convert to 2D
            out_geom.FlattenTo2D()

            # set the output feature geometry
            out_feature.SetGeometry(out_geom)

            # copy attributes best-effort: not every KML layer carries
            # every field, so failures are deliberately ignored
            for field_name in field_names:
                try:
                    out_feature.SetField(field_name,
                                         feature.GetField(field_name))
                except Exception:
                    pass

            out_feature.SetField('layer_name', layer.GetName())
            out_feature.SetField('id', feature_counter)

            # write the output feature to the matching shapefile
            out_layer.CreateFeature(out_feature)

            # clear the output feature variable
            out_feature = None

        # reset the layer reading in case it needs to be re-read later
        layer.ResetReading()

        print(layer_info['name'], feature_counter)

    # print counts (print() function for Python 3 compatibility,
    # matching the rest of the file)
    print('\nSUMMARY COUNTS')
    print("Feature count: %s" % feature_counter)
    print("Points count: %s" % points_counter)
    print("Lines count: %s" % lines_counter)
    print("Polygons count: %s" % polygons_counter)

    # cleanup
    points_datastore = None
    points_layer = None
    lines_datastore = None
    lines_layer = None
    polygons_datastore = None
    polygons_layer = None

    # remove empty output shapefiles
    driver = ogr.GetDriverByName('ESRI Shapefile')
    if points_counter == 0:
        driver.DeleteDataSource(points_shp_name)
    if lines_counter == 0:
        driver.DeleteDataSource(lines_shp_name)
    if polygons_counter == 0:
        driver.DeleteDataSource(polygons_shp_name)
Example no. 4
0
#last_date = last_date_alert(postgis_ds, ALERTS_SOURCE)
#print("Most recent " + ALERTS_SOURCE + " alert:" + str(last_date))

# Open the dataset holding newly detected alert polygons.
input_ds = ogr.Open(DS_NAME)
input_layer = input_ds.GetLayer()

# Build a coordinate transform from the input layer's SRS into the SRS
# of the consolidated alerts layer.
input_srs = input_layer.GetSpatialRef()
consolidated_alerts_srs = tmp_alerts_layer.GetSpatialRef()
transformation_srs = osr.CoordinateTransformation(input_srs,
                                                  consolidated_alerts_srs)

for input_feature in input_layer:
    # "label" holds the day-of-year of the detection; year 2018 assumed.
    doy = input_feature.GetField("label")
    feature_date_str = "%s-2018" % doy
    feature_date = str_to_date(feature_date_str, format="%j-%Y")
    #if(feature_date >= last_date):
    new_alert_feature = ogr.Feature(tmp_alerts_layer.GetLayerDefn())

    geom = input_feature.GetGeometryRef()
    geom.Transform(transformation_srs)
    new_alert_feature.SetGeometry(ogr.ForceToMultiPolygon(geom))

    # Attribute values, set in the same order as before.
    for fld, val in (('detection_date', feature_date_str),
                     ('sensor', 'Landsat'),
                     ('source', ALERTS_SOURCE),
                     ('insertion_date', current_date())):
        new_alert_feature.SetField(fld, val)

    tmp_alerts_layer.CreateFeature(new_alert_feature)
    print("Insert new " + ALERTS_SOURCE + " alert:" + str(feature_date))
Example no. 5
0
File: geo.py Project: Gudinya/ambry
    def load_shapefile(self, path, logger=None):
        """Load a shapefile into the partition.

        Opens the shapefile (downloading it first when *path* is a URL),
        maps its fields to mangled column names, reprojects each feature
        to ``Geometry.DEFAULT_SRS`` and inserts a row per feature via
        ``self.inserter()``.

        :param path: filesystem path to the shapefile, or an http(s) URL
            to download it from.
        :param logger: optional callable invoked with a progress message
            after each insert.
        :raises TypeError: re-raised (after logging) when a field value
            cannot be read as a string.
        :raises Exception: when a feature's geometry type differs from
            the table's geometry column type and is not the supported
            POLYGON -> MULTIPOLYGON promotion.
        """

        from osgeo import ogr, osr
        from ..geo.sfschema import ogr_inv_type_map, mangle_name
        from ..orm import Column, Geometry
        from ..geo.util import get_type_from_geometry

        # Remote shapefiles are fetched to a local path first.
        if path.startswith('http'):
            shape_url = path
            path = self.bundle.filesystem.download_shapefile(shape_url)

        driver = ogr.GetDriverByName("ESRI Shapefile")

        # 0 = read-only open
        dataSource = driver.Open(path, 0)

        layer = dataSource.GetLayer()

        # Target SRS every geometry is reprojected into.
        to_srs = ogr.osr.SpatialReference()
        to_srs.ImportFromEPSG(Geometry.DEFAULT_SRS)

        dfn = layer.GetLayerDefn()

        # (mangled column name, python type) per shapefile field, in
        # field order so they can be indexed by field position below.
        col_defs = []

        for i in range(0, dfn.GetFieldCount()):
            field = dfn.GetFieldDefn(i)

            col_defs.append(
                (Column.mangle_name(
                    mangle_name(
                        field.GetName())),
                    Column.types[
                        ogr_inv_type_map[
                            field.GetType()]][1]))

        # Find the declared type of the table's geometry column; the
        # table is required to have one.
        col_type = None
        for c in self.table.columns:
            if c.name == 'geometry':
                col_type = c.datatype.upper()
                break

        assert col_type is not None

        with self.inserter() as ins:
            for feature in layer:
                d = {}
                for i in range(0, dfn.GetFieldCount()):
                    name, type_ = col_defs[i]
                    try:
                        # All values are read as strings; the inserter
                        # handles conversion downstream.
                        d[name] = feature.GetFieldAsString(i)
                    except TypeError as e:
                        self.bundle.logger.error(
                            "Type error for column '{}', type={}: {}".format(
                                name,
                                type_,
                                e))
                        raise

                g = feature.GetGeometryRef()
                g.TransformTo(to_srs)

                type_ = get_type_from_geometry(g)

                # Only POLYGON -> MULTIPOLYGON promotion is supported;
                # any other mismatch with the table's geometry type is
                # fatal.
                if type_ != col_type:
                    if type_ == 'POLYGON' and col_type == 'MULTIPOLYGON':
                        g = ogr.ForceToMultiPolygon(g)
                    else:
                        raise Exception(
                            "Don't know how to handle this conversion case : {} -> {}".format(type_, col_type))

                d['geometry'] = g.ExportToWkt()

                ins.insert(d)

                if logger:
                    logger(
                        "Importing shapefile to '{}'".format(
                            self.identity.name))
Example no. 6
0
def write_to_ogr_dataset(ogr_driver_str, ogrDriver, dst_ds, dst_lyr, groups,
                         pairs, total, db_path_prefix, fld_defs, args):
    """Append DEM index records to an OGR dataset.

    Opens or creates the dataset ``dst_ds`` with ``ogrDriver``, creates the
    layer ``dst_lyr`` (wkbMultiPolygon, EPSG ``args.epsg``) if it does not
    exist, then writes one feature per record in ``groups``.  The attribute
    set depends on ``args.mode`` ('scene', 'strip' or 'tile').  Geometries
    are transformed to the target SRS; when the target SRS is geographic
    and a footprint spans the 180th meridian it is split with ``wrap_180``,
    otherwise it is promoted to a multipolygon.

    :param ogr_driver_str: OGR driver name ('ESRI Shapefile', 'FileGDB' or
        'PostgreSQL'); other values are logged and nothing is written.
    :param ogrDriver: ogr.Driver instance matching ``ogr_driver_str``.
    :param dst_ds: dataset path or connection string.
    :param dst_lyr: destination layer (table) name.
    :param groups: mapping of group id -> list of DEM records.
    :param pairs: mapping of pairname -> region (used in scene mode).
    :param total: total record count, for progress reporting.
    :param db_path_prefix: optional path prefix used to build LOCATION.
    :param fld_defs: field definitions used when creating the layer.
    :param args: parsed command-line args (mode, epsg, dryrun, bp_paths).
    """
    ## Create dataset if it does not exist.
    ## BUGFIX: ds must be initialized, otherwise an unsupported format
    ## raised NameError at the "ds is not None" check below.
    ds = None
    if ogr_driver_str == 'ESRI Shapefile':
        if os.path.isfile(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'FileGDB':
        if os.path.isdir(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'PostgreSQL':
        # DB must already exist
        ds = ogrDriver.Open(dst_ds, 1)

    else:
        logger.error("Format {} is not supported".format(ogr_driver_str))

    # Records stored on tape (BlackPearl paths) are flagged accordingly.
    if args.bp_paths:
        status = 'tape'
    else:
        status = 'online'

    if ds is not None:

        ## Create table if it does not exist
        layer = ds.GetLayerByName(dst_lyr)
        fld_list = [f.fname for f in fld_defs]

        tgt_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        tgt_srs.ImportFromEPSG(args.epsg)

        if not layer:
            logger.info("Creating table...")

            layer = ds.CreateLayer(dst_lyr, tgt_srs, ogr.wkbMultiPolygon)
            if layer:
                for field_def in fld_defs:
                    field = ogr.FieldDefn(field_def.fname, field_def.ftype)
                    field.SetWidth(field_def.fwidth)
                    field.SetPrecision(field_def.fprecision)
                    layer.CreateField(field)

        ## Append Records
        if layer:
            logger.info("Appending records...")
            #### loop through records and add features
            i = 0
            for groupid in groups:
                for record in groups[groupid]:
                    i += 1
                    progress(i, total, "features written")
                    if not args.dryrun:

                        feat = ogr.Feature(layer.GetLayerDefn())
                        valid_record = True

                        ## Set attributes
                        ## Fields for scene DEM
                        if args.mode == 'scene':

                            attrib_map = {
                                'SCENEDEMID': record.sceneid,
                                'STRIPDEMID': record.stripid,
                                'STATUS': status,
                                'PAIRNAME': record.pairname,
                                'SENSOR1': record.sensor1,
                                'SENSOR2': record.sensor2,
                                'ACQDATE1':
                                record.acqdate1.strftime('%Y-%m-%d'),
                                'ACQDATE2':
                                record.acqdate2.strftime('%Y-%m-%d'),
                                'CATALOGID1': record.catid1,
                                'CATALOGID2': record.catid2,
                                'HAS_LSF': int(os.path.isfile(record.lsf_dem)),
                                'HAS_NONLSF': int(os.path.isfile(record.dem)),
                                'ALGM_VER': record.algm_version,
                                'FILESZ_DEM': record.filesz_dem,
                                'FILESZ_LSF': record.filesz_lsf,
                                'FILESZ_MT': record.filesz_mt,
                                'FILESZ_OR': record.filesz_or,
                                'PROJ4': record.proj4,
                                'EPSG': record.epsg,
                            }

                            ## Set region; REGION is simply omitted when the
                            ## pairname has no region lookup entry
                            try:
                                region = pairs[record.pairname]
                            except KeyError:
                                pass
                            else:
                                attrib_map['REGION'] = region

                            ## Set path folders within bucket for use if db_path_prefix specified
                            path_prefix_dirs = "{}/{}/{}".format(
                                record.pairname[:4],  # sensor
                                record.pairname[5:9],  # year
                                record.pairname[9:11],  # month"
                            )

                        ## Fields for strip DEM
                        if args.mode == 'strip':
                            attrib_map = {
                                'DEM_ID': record.stripid,
                                'PAIRNAME': record.pairname,
                                'SENSOR1': record.sensor1,
                                'SENSOR2': record.sensor2,
                                'ACQDATE1':
                                record.acqdate1.strftime('%Y-%m-%d'),
                                'ACQDATE2':
                                record.acqdate2.strftime('%Y-%m-%d'),
                                'CATALOGID1': record.catid1,
                                'CATALOGID2': record.catid2,
                                'IS_LSF': int(record.is_lsf),
                                'ALGM_VER': record.algm_version,
                                'FILESZ_DEM': record.filesz_dem,
                                'FILESZ_MT': record.filesz_mt,
                                'FILESZ_OR': record.filesz_or,
                                'PROJ4': record.proj4,
                                'EPSG': record.epsg,
                                'GEOCELL': record.geocell,
                            }

                            if record.version:
                                attrib_map['REL_VER'] = record.version
                            if record.density:
                                attrib_map['DENSITY'] = record.density
                            else:
                                attrib_map['DENSITY'] = -9999

                            ## If registration info exists
                            if len(record.reginfo_list) > 0:
                                for reginfo in record.reginfo_list:
                                    if reginfo.name == 'ICESat':
                                        attrib_map["DX"] = reginfo.dx
                                        attrib_map["DY"] = reginfo.dy
                                        attrib_map["DZ"] = reginfo.dz
                                        attrib_map["REG_SRC"] = 'ICESat'
                                        attrib_map[
                                            "NUM_GCPS"] = reginfo.num_gcps
                                        attrib_map[
                                            "MEANRESZ"] = reginfo.mean_resid_z

                            ## Set path folders within bucket for use if db_path_prefix specified
                            path_prefix_dirs = "{}/{}/{}".format(
                                record.pairname[:4],  # sensor
                                record.pairname[5:9],  # year
                                record.pairname[9:11],  # month"
                            )

                        ## Fields for tile DEM
                        if args.mode == 'tile':
                            attrib_map = {
                                'DEM_ID': record.tileid,
                                'TILE': record.tilename,
                                'NUM_COMP': record.num_components,
                                'FILESZ_DEM': record.filesz_dem,
                            }

                            ## Optional attributes
                            if record.version:
                                attrib_map['REL_VER'] = record.version
                                version = record.version
                            else:
                                version = 'novers'
                            if record.density:
                                attrib_map['DENSITY'] = record.density
                            else:
                                attrib_map['DENSITY'] = -9999
                            if record.reg_src:
                                attrib_map["REG_SRC"] = record.reg_src
                                attrib_map["NUM_GCPS"] = record.num_gcps
                            if record.mean_resid_z:
                                attrib_map["MEANRESZ"] = record.mean_resid_z

                            ## Set path folders within bucket for use if db_path_prefix specified
                            if db_path_prefix:
                                path_prefix_dirs = "{}/{}/{}".format(
                                    args.project.lower(),  # project
                                    record.res,  # resolution
                                    version  # version
                                )

                        ## Common Attributes accross all modes
                        attrib_map['INDEX_DATE'] = datetime.datetime.today(
                        ).strftime('%Y-%m-%d')
                        attrib_map['CR_DATE'] = record.creation_date.strftime(
                            '%Y-%m-%d')
                        attrib_map['ND_VALUE'] = record.ndv
                        attrib_map['DEM_RES'] = (record.xres +
                                                 record.yres) / 2.0

                        ## Set location
                        if db_path_prefix:
                            location = '{}/{}/{}/{}.tar'.format(
                                db_path_prefix,
                                args.mode,  # mode (scene, strip, tile)
                                path_prefix_dirs,  # mode-specific path prefix
                                groupid  # mode-specific group ID
                            )
                        else:
                            location = record.srcfp
                        attrib_map['LOCATION'] = location

                        ## Transfrom and write geom
                        src_srs = utils.osr_srs_preserve_axis_order(
                            osr.SpatialReference())
                        src_srs.ImportFromWkt(record.proj)

                        if not record.geom:
                            logger.error(
                                'No valid geom found, feature skipped: {}'.
                                format(record.sceneid))
                            valid_record = False
                        else:
                            temp_geom = record.geom.Clone()
                            transform = osr.CoordinateTransformation(
                                src_srs, tgt_srs)
                            try:
                                temp_geom.Transform(transform)
                            except TypeError as e:
                                logger.error(
                                    'Geom transformation failed, feature skipped: {}'
                                    .format(record.sceneid))
                                valid_record = False
                            else:

                                ## Get centroid coordinates
                                centroid = temp_geom.Centroid()
                                if 'CENT_LAT' in fld_list:
                                    attrib_map['CENT_LAT'] = centroid.GetY()
                                    attrib_map['CENT_LON'] = centroid.GetX()

                                ## If srs is geographic and geom crosses 180, split geom into 2 parts
                                ## BUGFIX: IsGeographic is a method; without
                                ## the call the bound method object was
                                ## always truthy, so this branch also ran
                                ## for projected SRS.
                                if tgt_srs.IsGeographic():

                                    ## Get Lat and Lon coords in arrays
                                    lons = []
                                    lats = []
                                    ring = temp_geom.GetGeometryRef(
                                        0)  #### assumes a 1 part polygon
                                    for j in range(0, ring.GetPointCount()):
                                        pt = ring.GetPoint(j)
                                        lons.append(pt[0])
                                        lats.append(pt[1])

                                    ## Test if image crosses 180
                                    if max(lons) - min(lons) > 180:
                                        split_geom = wrap_180(temp_geom)
                                        feat_geom = split_geom
                                    else:
                                        mp_geom = ogr.ForceToMultiPolygon(
                                            temp_geom)
                                        feat_geom = mp_geom

                                else:
                                    mp_geom = ogr.ForceToMultiPolygon(
                                        temp_geom)
                                    feat_geom = mp_geom

                        ## Write feature
                        if valid_record:
                            for fld, val in attrib_map.items():
                                feat.SetField(fld, val)
                            feat.SetGeometry(feat_geom)

                            ## Add new feature to layer.
                            ## BUGFIX: "in ('PostgreSQL')" was a substring
                            ## test against a plain string, not tuple
                            ## membership; use equality instead.
                            if ogr_driver_str == 'PostgreSQL':
                                layer.StartTransaction()
                                layer.CreateFeature(feat)
                                layer.CommitTransaction()
                            else:
                                layer.CreateFeature(feat)

        else:
            logger.error('Cannot open layer: {}'.format(dst_lyr))

        # Close the dataset (OGR flushes on dereference).
        ds = None

    else:
        logger.info("Cannot open dataset: {}".format(dst_ds))

    logger.info("Done")
Example no. 7
0
def write_to_ogr_dataset(ogr_driver_str, ogrDriver, dst_ds, dst_lyr, groups,
                         pairs, total, db_path_prefix, fld_defs, args):

    ## Create dataset if it does not exist
    if ogr_driver_str == 'ESRI Shapefile':
        max_fld_width = 254
        if os.path.isfile(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'FileGDB':
        max_fld_width = 1024
        if os.path.isdir(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'PostgreSQL':
        max_fld_width = 1024
        # DB must already exist
        ds = ogrDriver.Open(dst_ds, 1)

    else:
        logger.error("Format {} is not supported".format(ogr_driver_str))

    if args.status:
        status = args.status
    elif args.bp_paths:
        status = 'tape'
    else:
        status = 'online'

    if ds is not None:

        ## Create table if it does not exist
        layer = ds.GetLayerByName(dst_lyr)
        fld_list = [f.fname for f in fld_defs]

        err.err_level = gdal.CE_None
        tgt_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        tgt_srs.ImportFromEPSG(args.epsg)
        if err.err_level >= gdal.CE_Warning:
            raise RuntimeError(err.err_level, err.err_no, err.err_msg)

        if not layer:
            logger.info("Creating table...")

            layer = ds.CreateLayer(dst_lyr, tgt_srs, ogr.wkbMultiPolygon)
            if layer:
                for field_def in fld_defs:
                    field = ogr.FieldDefn(field_def.fname, field_def.ftype)
                    field.SetWidth(min(max_fld_width, field_def.fwidth))
                    field.SetPrecision(field_def.fprecision)
                    layer.CreateField(field)

        ## Append Records
        if layer:
            # Get field widths
            lyr_def = layer.GetLayerDefn()
            fwidths = {
                lyr_def.GetFieldDefn(i).GetName().upper():
                lyr_def.GetFieldDefn(i).GetWidth()
                for i in range(lyr_def.GetFieldCount())
            }

            logger.info("Appending records...")
            #### loop through records and add features
            i = 0
            recordids = []
            invalid_record_cnt = 0

            for groupid in groups:
                for record in groups[groupid]:
                    i += 1
                    progress(i, total, "features written")
                    feat = ogr.Feature(layer.GetLayerDefn())
                    valid_record = True

                    ## Set attributes
                    ## Fields for scene DEM
                    if args.mode == 'scene':

                        attrib_map = {
                            'SCENEDEMID':
                            record.dsp_sceneid if
                            (args.dsp_original_res
                             and record.is_dsp) else record.sceneid,
                            'STRIPDEMID':
                            record.dsp_stripdemid if
                            (args.dsp_original_res
                             and record.is_dsp) else record.stripdemid,
                            'STATUS':
                            status,
                            'PAIRNAME':
                            record.pairname,
                            'SENSOR1':
                            record.sensor1,
                            'SENSOR2':
                            record.sensor2,
                            'ACQDATE1':
                            record.acqdate1.strftime('%Y-%m-%d'),
                            'ACQDATE2':
                            record.acqdate2.strftime('%Y-%m-%d'),
                            'CATALOGID1':
                            record.catid1,
                            'CATALOGID2':
                            record.catid2,
                            'HAS_LSF':
                            int(os.path.isfile(record.lsf_dem)),
                            'HAS_NONLSF':
                            int(os.path.isfile(record.dem)),
                            'IS_XTRACK':
                            int(record.is_xtrack),
                            'IS_DSP':
                            0 if args.dsp_original_res else int(record.is_dsp),
                            'ALGM_VER':
                            record.algm_version,
                            'PROJ4':
                            record.proj4,
                            'EPSG':
                            record.epsg,
                        }

                        attr_pfx = 'dsp_' if args.dsp_original_res else ''
                        for k in record.filesz_attrib_map:
                            attrib_map[k.upper()] = getattr(
                                record, '{}{}'.format(attr_pfx, k))

                        # Test if filesz attr is valid for dsp original res records
                        if args.dsp_original_res:
                            if attrib_map['FILESZ_DEM'] is None:
                                logger.error(
                                    "Original res filesz_dem is empty for {}. Record skipped"
                                    .format(record.sceneid))
                                valid_record = False
                            elif attrib_map['FILESZ_DEM'] == 0:
                                logger.warning(
                                    "Original res filesz_dem is 0 for {}. Record will still be written"
                                    .format(record.sceneid))

                        # Test if filesz attr is valid for normal records
                        elif not attrib_map['FILESZ_DEM'] and not attrib_map[
                                'FILESZ_LSF']:
                            logger.warning(
                                "DEM and LSF DEM file size is zero or null for {}. Record will still be written"
                                .format(record.sceneid))
                            valid_record = False

                        # Set region
                        try:
                            region = pairs[record.pairname]
                        except KeyError as e:
                            region = None
                        else:
                            attrib_map['REGION'] = region

                        if db_path_prefix:
                            if args.bp_paths:
                                # https://blackpearl-data2.pgc.umn.edu/dem/setsm/scene/WV02/2015/05/
                                # WV02_20150506_1030010041510B00_1030010043050B00_50cm_v040002.tar
                                custom_path = "{}/{}/{}/{}/{}.tar".format(
                                    args.mode,  # mode (scene, strip, tile)
                                    record.pairname[:4],  # sensor
                                    record.pairname[5:9],  # year
                                    record.pairname[9:11],  # month
                                    groupid  # mode-specific group ID
                                )

                            elif args.tnva_paths:
                                # /mnt/pgc/data/elev/dem/setsm/ArcticDEM/region/arcticdem_01_iceland/scenes/
                                # 2m/WV01_20200630_10200100991E2C00_102001009A862700_2m_v040204/
                                # WV01_20200630_10200100991E2C00_102001009A862700_504471479080_01_P001_504471481090_01_P001_2_meta.txt

                                if not region:
                                    logger.error(
                                        "Pairname not found in region lookup {}, cannot built custom path"
                                        .format(record.pairname))
                                    valid_record = False

                                else:
                                    pretty_project = PROJECTS[region.split('_')
                                                              [0]]
                                    res_dir = record.res_str + '_dsp' if record.is_dsp else record.res_str

                                    custom_path = "{}/{}/region/{}/scenes/{}/{}/{}".format(
                                        db_path_prefix,
                                        pretty_project,  # project (e.g. ArcticDEM)
                                        region,  # region
                                        res_dir,  # e.g. 2m, 50cm, 2m_dsp
                                        groupid,  # strip ID
                                        record.srcfn  # file name (meta.txt)
                                    )
                            else:
                                logger.error(
                                    "Mode {} does not support the specified custom path option, skipping record"
                                    .format(args.mode))
                                valid_record = False

                    ## Fields for strip DEM
                    if args.mode == 'strip':
                        # Base attributes for a strip record; mode-independent
                        # fields (INDEX_DATE, LOCATION, geom, ...) are added in
                        # the common section further down.
                        attrib_map = {
                            'DEM_ID': record.stripid,
                            'STRIPDEMID': record.stripdemid,
                            'PAIRNAME': record.pairname,
                            'SENSOR1': record.sensor1,
                            'SENSOR2': record.sensor2,
                            'ACQDATE1': record.acqdate1.strftime('%Y-%m-%d'),
                            'ACQDATE2': record.acqdate2.strftime('%Y-%m-%d'),
                            'CATALOGID1': record.catid1,
                            'CATALOGID2': record.catid2,
                            'IS_LSF': int(record.is_lsf),
                            'IS_XTRACK': int(record.is_xtrack),
                            # mask_tuple carries (edge, water, cloud) flags
                            'EDGEMASK': int(record.mask_tuple[0]),
                            'WATERMASK': int(record.mask_tuple[1]),
                            'CLOUDMASK': int(record.mask_tuple[2]),
                            'ALGM_VER': record.algm_version,
                            'FILESZ_DEM': record.filesz_dem,
                            'FILESZ_MT': record.filesz_mt,
                            'FILESZ_OR': record.filesz_or,
                            'FILESZ_OR2': record.filesz_or2,
                            'PROJ4': record.proj4,
                            'EPSG': record.epsg,
                            'GEOCELL': record.geocell,
                        }

                        ## Set region; a pairname missing from the lookup just
                        ## leaves REGION unset (not an error in strip mode)
                        try:
                            region = pairs[record.pairname]
                        except KeyError as e:
                            pass
                        else:
                            attrib_map['REGION'] = region

                        # Optional attributes; -9999 is the no-data sentinel
                        # for a missing density value
                        if record.version:
                            attrib_map['REL_VER'] = record.version
                        if record.density:
                            attrib_map['DENSITY'] = record.density
                        else:
                            attrib_map['DENSITY'] = -9999

                        ## If registration info exists, copy the ICESat
                        ## registration offsets/stats onto the feature
                        if args.include_registration:
                            if len(record.reginfo_list) > 0:
                                for reginfo in record.reginfo_list:
                                    if reginfo.name == 'ICESat':
                                        attrib_map["DX"] = reginfo.dx
                                        attrib_map["DY"] = reginfo.dy
                                        attrib_map["DZ"] = reginfo.dz
                                        attrib_map["REG_SRC"] = 'ICESat'
                                        attrib_map[
                                            "NUM_GCPS"] = reginfo.num_gcps
                                        attrib_map[
                                            "MEANRESZ"] = reginfo.mean_resid_z

                        ## Set path folders for use if db_path_prefix specified
                        if db_path_prefix:
                            if args.bp_paths:
                                # <prefix>/strip/<sensor>/<year>/<month>/<groupid>.tar
                                custom_path = "{}/{}/{}/{}/{}/{}.tar".format(
                                    db_path_prefix,
                                    args.mode,  # mode (scene, strip, tile)
                                    record.pairname[:4],  # sensor
                                    record.pairname[5:9],  # year
                                    record.pairname[9:11],  # month
                                    groupid  # mode-specific group ID
                                )
                            else:
                                logger.error(
                                    "Mode {} does not support the specified custom path option, skipping record"
                                    .format(args.mode))
                                valid_record = False

                    ## Fields for tile DEM
                    if args.mode == 'tile':
                        # Base attributes for a tile record; mode-independent
                        # fields are added in the common section further down.
                        attrib_map = {
                            'DEM_ID': record.tileid,
                            'TILE': record.tilename,
                            'NUM_COMP': record.num_components,
                            'FILESZ_DEM': record.filesz_dem,
                        }

                        ## Optional attributes
                        if record.version:
                            attrib_map['REL_VER'] = record.version
                            version = record.version
                        else:
                            # Placeholder used in the custom path when the
                            # record carries no release version
                            version = 'novers'
                        if record.density:
                            attrib_map['DENSITY'] = record.density
                        else:
                            # -9999 is the no-data sentinel for density
                            attrib_map['DENSITY'] = -9999

                        if args.include_registration:
                            if record.reg_src:
                                attrib_map["REG_SRC"] = record.reg_src
                                attrib_map["NUM_GCPS"] = record.num_gcps
                            if record.mean_resid_z:
                                attrib_map["MEANRESZ"] = record.mean_resid_z

                        ## Set path folders for use if db_path_prefix specified
                        if db_path_prefix:
                            if args.bp_paths:
                                # <prefix>/tile/<project>/<res>/<version>/<groupid>.tar
                                # FIX: use args.mode like the other mode
                                # branches (was record.mode)
                                custom_path = "{}/{}/{}/{}/{}/{}.tar".format(
                                    db_path_prefix,
                                    args.mode,  # mode (scene, strip, tile)
                                    args.project.lower(),  # project
                                    record.res,  # resolution
                                    version,  # version
                                    groupid  # mode-specific group ID
                                )
                            else:
                                logger.error(
                                    "Mode {} does not support the specified custom path option, skipping record"
                                    .format(args.mode))
                                valid_record = False

                    ## Common fields
                    if valid_record:
                        ## Common Attributes across all modes
                        attrib_map['INDEX_DATE'] = datetime.datetime.today(
                        ).strftime('%Y-%m-%d')
                        attrib_map['CR_DATE'] = record.creation_date.strftime(
                            '%Y-%m-%d')
                        attrib_map['ND_VALUE'] = record.ndv
                        if args.dsp_original_res:
                            res = record.dsp_dem_res
                        else:
                            # Mean of x/y pixel sizes
                            res = (record.xres + record.yres) / 2.0
                        attrib_map['DEM_RES'] = res

                        ## Set location: custom DB path when one was built,
                        ## otherwise the record's source file path
                        if db_path_prefix:
                            location = custom_path
                        else:
                            location = record.srcfp
                        attrib_map['LOCATION'] = location

                        ## Transform and write geom
                        src_srs = utils.osr_srs_preserve_axis_order(
                            osr.SpatialReference())
                        src_srs.ImportFromWkt(record.proj)

                        if not record.geom:
                            # NOTE(review): record.sceneid may only exist on
                            # scene records -- confirm for strip/tile modes.
                            logger.error(
                                'No valid geom found, feature skipped: {}'.
                                format(record.sceneid))
                            valid_record = False
                        else:
                            temp_geom = record.geom.Clone()
                            transform = osr.CoordinateTransformation(
                                src_srs, tgt_srs)
                            try:
                                temp_geom.Transform(transform)
                            except TypeError as e:
                                logger.error(
                                    'Geom transformation failed, feature skipped: {} {}'
                                    .format(e, record.sceneid))
                                valid_record = False
                            else:

                                ## Get centroid coordinates
                                centroid = temp_geom.Centroid()
                                if 'CENT_LAT' in fld_list:
                                    attrib_map['CENT_LAT'] = centroid.GetY()
                                    attrib_map['CENT_LON'] = centroid.GetX()

                                ## If srs is geographic and geom crosses 180, split geom into 2 parts
                                ## FIX: IsGeographic is a method; without the
                                ## call parentheses the bound method object was
                                ## always truthy and the projected-SRS branch
                                ## below could never run.
                                if tgt_srs.IsGeographic():

                                    ## Get Lat and Lon coords in arrays
                                    lons = []
                                    lats = []
                                    ring = temp_geom.GetGeometryRef(
                                        0)  #### assumes a 1 part polygon
                                    for j in range(0, ring.GetPointCount()):
                                        pt = ring.GetPoint(j)
                                        lons.append(pt[0])
                                        lats.append(pt[1])

                                    ## Test if image crosses 180
                                    if max(lons) - min(lons) > 180:
                                        split_geom = wrap_180(temp_geom)
                                        feat_geom = split_geom
                                    else:
                                        mp_geom = ogr.ForceToMultiPolygon(
                                            temp_geom)
                                        feat_geom = mp_geom

                                else:
                                    mp_geom = ogr.ForceToMultiPolygon(
                                        temp_geom)
                                    feat_geom = mp_geom

                    ## Write feature
                    if valid_record:
                        # Validate each attribute against the target schema
                        # (field exists, string fits its width) and copy it
                        # onto the feature.
                        for fld, val in attrib_map.items():
                            if fld in fwidths:
                                if isinstance(val,
                                              str) and len(val) > fwidths[fld]:
                                    logger.warning(
                                        "Attribute value {} is too long for field {} (width={}). Feature skipped"
                                        .format(val, fld, fwidths[fld]))
                                    valid_record = False
                            else:
                                logger.warning(
                                    "Field {} is not in target table. Feature skipped"
                                    .format(fld))
                                valid_record = False
                            # NOTE(review): SetField still runs for fields that
                            # just failed validation above -- confirm this is
                            # intentional. `unicode` is Python 2 only.
                            feat.SetField(  # force unicode to str for a bug in GDAL's SetField. Revisit in Python3
                                fld.encode('utf-8'),
                                val if not isinstance(val, unicode) else
                                val.encode('utf-8'))
                        feat.SetGeometry(feat_geom)

                        ## Add new feature to layer
                        if not valid_record:
                            invalid_record_cnt += 1
                        else:
                            if not args.dryrun:
                                # Store record identifiers for later checking
                                recordids.append(
                                    recordid_map[args.mode].format(
                                        **attrib_map))

                                # Append record
                                err.err_level = gdal.CE_None
                                try:
                                    # NOTE(review): ('PostgreSQL') is a plain
                                    # string, not a tuple, so this is a
                                    # substring test -- works for the exact
                                    # driver name but is fragile.
                                    if ogr_driver_str in ('PostgreSQL'):
                                        layer.StartTransaction()
                                        layer.CreateFeature(feat)
                                        layer.CommitTransaction()
                                    else:
                                        layer.CreateFeature(feat)
                                except Exception as e:
                                    raise e
                                else:
                                    # Promote GDAL warnings/errors captured by
                                    # the installed error handler to failures
                                    if err.err_level >= gdal.CE_Warning:
                                        raise RuntimeError(
                                            err.err_level, err.err_no,
                                            err.err_msg)
                                finally:
                                    # NOTE(review): pops an error handler on
                                    # every written feature; the matching Push
                                    # is not visible here -- confirm pairing.
                                    gdal.PopErrorHandler()

            # Summarize skipped records and abort if nothing was written
            if invalid_record_cnt > 0:
                logger.info(
                    "{} invalid records skipped".format(invalid_record_cnt))

            if len(recordids) == 0:
                logger.error("No valid records found")
                sys.exit(-1)

            # Check contents of layer for all records
            if args.check and not args.dryrun:
                layer.ResetReading()
                # Collect the ID fields of every feature in the layer, then
                # render each into the same record-ID format string used when
                # the records were appended above
                attrib_maps = [{
                    id_fld: feat.GetField(id_fld)
                    for id_fld in id_flds if id_fld in fld_list
                } for feat in layer]
                layer_recordids = [
                    recordid_map[args.mode].format(**attrib_map)
                    for attrib_map in attrib_maps
                ]
                layer_recordids = set(layer_recordids)

                # Every appended record must be present in the target layer
                err_cnt = 0
                for recordid in recordids:
                    if recordid not in layer_recordids:
                        err_cnt += 1
                        logger.error(
                            "Record not found in target layer: {}".format(
                                recordid))

                if err_cnt > 0:
                    sys.exit(-1)

        else:
            logger.error('Cannot open layer: {}'.format(dst_lyr))
            ds = None
            sys.exit(-1)

        # Dereference to flush and close the datasource
        ds = None

    else:
        logger.info("Cannot open dataset: {}".format(dst_ds))
        sys.exit(-1)

    if args.dryrun:
        logger.info("Done (dryrun)")
    else:
        logger.info("Done")
    sys.exit(0)