Example #1
def extract_file(name):
    """Extract the zip and save the contents of the zip into a directory
    organized by username in the config file.
    Save the GeoJSON output of the gpx in a mongo db instance.

    TODO: Insert assertions for error handling."""
    with zipfile.ZipFile(os.path.join(UPLOAD_DEST, name)) as zipF:
        with zipF.open('config.json') as f:
            config = json.load(f)
            zipF.extractall(
                os.path.join(UPLOAD_DEST, 'extracted_data',
                             config.get('Device ID'), config.get('User')))
        for files in zipF.infolist():
            if files.filename.endswith(".gpx"):
                with zipF.open(files) as f:
                    ogr2ogr.main([
                        '', '-skipfailures', '-f', 'GeoJSON',
                        os.path.join(UPLOAD_DEST, files.filename + '.json'),
                        os.path.join(UPLOAD_DEST, 'extracted_data',
                                     config.get('Device ID'),
                                     config.get('User'), files.filename)
                    ])
                    filename = os.path.join(UPLOAD_DEST,
                                            files.filename + '.json')
                    with open(filename) as jsonFile:
                        config['track'] = json.load(jsonFile)
                    os.remove(filename)
                # rstrip('.gpx') strips a character set, not a suffix; drop the extension explicitly
                config['track-name'] = os.path.splitext(files.filename)[0]
    mongo.db.tracks.save(config)
    return True
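The TODO above asks for assertions around error handling. A minimal sketch of what those checks could look like, assuming config.json must supply the 'Device ID' and 'User' keys used to build the output path (the helper name and messages are hypothetical, not part of the original):

def _check_config(config):
    # hypothetical helper: fail fast if the uploaded config.json is unusable
    assert isinstance(config, dict), "config.json must contain a JSON object"
    for key in ('Device ID', 'User'):
        assert config.get(key), "config.json is missing required key: %s" % key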
def clMask2(img, clPath, baseName, wod):
    year=baseName.split("_")[2][0:4] 
    tile=baseName.split("_")[5]
    mainDir=os.path.join(clPath, tile, year, baseName+".SAFE")
    if not os.path.exists(mainDir):
        print "problem finding cloud mask folder"
        raise SystemExit

    # look for cloudmask in remote folder
    global clMpath
    for r, d, files in os.walk(mainDir):
        for f in files:
            if f.endswith("B00.gml"):
                clMpath=os.path.join(r,f)
    cpv=wod+"cloudVec.shp"
    if not QgsVectorLayer(clMpath, "cl", "ogr").isValid():
        print "problem with original cloud mask or no clouds in the image"
        return img
    #translate gml to shapefile
    ogr2ogr.main(["","-f", "ESRI Shapefile", "-s_srs", "EPSG:32630", cpv, clMpath])
    if not QgsVectorLayer(cpv, "cl", "ogr").isValid():
        print "cloud vector translation didn't go well"
        raise SystemExit
    else:
        print "cloud vector translation did work!!"
    # rasterize cloud mask
    # parameters to rasterize
    cloudRes=QgsRasterLayer(img).rasterUnitsPerPixelX()
    cloudRast=wod+"clMask.tif"
    #rasterization
    x=datetime.datetime.now()
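    # note (assumption): imgCrs and extImg are module-level globals set earlier in the
    # script (the image CRS object and its extent as a comma-separated string)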

    cmd="gdal_rasterize -burn 10 -a_srs %s -te %s -tr %s %s %s %s" %(imgCrs.authid(), extImg.replace(","," "), cloudRes, cloudRes, cpv, cloudRast)
    print cmd
    os.system(cmd)

    #change nodata value
    cmd="gdal_edit.py -unsetnodata %s" %(cloudRast)
    os.system(cmd)
    print "check nodatavalues in %s" %(cloudRast)
    #cloudRast2=wod+"clMaskNoData.tif"
    #p.runalg("grass7:r.null", cloudRast,"",1,False,False,False,False,False, extImg,cloudRes, cloudRast2)
    if not QgsRasterLayer(cloudRast).isValid():
        print "Cloud raster %s has a problem \n" %(cloudRast)
    # Mask out cloudy pixels
    rMaskPath=wod+"clMaskNDVI.tif"
    cmd="gdal_calc.py -A %s -B %s --outfile=%s --calc='A*(B<9)' " %(img, cloudRast, rMaskPath)
    print(cmd)
    os.system(cmd)
    y=datetime.datetime.now()
    c=y-x
    d=divmod(c.days*86400+c.seconds, 60)
    print "finished cloud masking in %f minutes and %f seconds" %(d[0], d[1])
    if not QgsRasterLayer(rMaskPath).isValid():
        print "problem masking image %s function 8 clMask" %baseName
        raise SystemExit
    else:
        print "output of function 8 clMask is:\n %s" %rMaskPath
        return rMaskPath
Example #3
def clMask2(img, clPath, baseName, wod):
    year=baseName.split("_")[2][0:4]
    tile=baseName.split("_")[5]
    mainDir=os.path.join(clPath, tile, year, baseName+".SAFE")
    if not os.path.exists(mainDir):
        print "problem finding cloud mask folder"
        raise SystemExit

    # look for cloudmask in remote folder
    global clMpath
    for r, d, files in os.walk(mainDir):
        for f in files:
            if f.endswith("B00.gml"):
                clMpath=os.path.join(r,f)
    cpv=wod+"cloudVec.shp"
    if not QgsVectorLayer(clMpath, "cl", "ogr").isValid():
        print "problem with original cloud mask or no clouds in the image"
        return img
    #translate gml to shapefile
    ogr2ogr.main(["","-f", "ESRI Shapefile", "-s_srs", "EPSG:32630", cpv, clMpath])
    if not QgsVectorLayer(cpv, "cl", "ogr").isValid():
        print "cloud vector translation didn't go well"
        raise SystemExit
    else:
        print "cloud vector translation did work!!"
    # rasterize cloud mask
    # parameters to rasterize
    fName=[field.name() for field in QgsVectorLayer(cpv, "cl", "ogr").pendingFields()][0]
    print "first field name is"
    print fName
    cloudRes=QgsRasterLayer(img).rasterUnitsPerPixelX()
    cloudRast=wod+"clMask.tif"
    #rasterization
    x=datetime.datetime.now()
    cmd="gdal_rasterize -burn 0 -a_nodata 1000 -a_srs %s -te %s -tr %s %s %s %s" %(imgCrs.authid(), extImg.replace(","," "), cloudRes, cloudRes, cpv, cloudRast)
    print cmd
    os.system(cmd)
    #p.runalg("gdalogr:rasterize", cpv, fName, 1, 10,10, extImg, False, 5,0,4,75, 6,1,False,0,"-burn 1 -a_srs 'EPSG:32630'",cloudRast)
    if not QgsRasterLayer(cloudRast).isValid():
        #turn null to 0 values in cloud mask and use them
        print "Cloud raster %s has a problem \n" %(cloudRast)
        raise SystemExit
    # Mask out cloudy pixels
    rMaskPath=wod+"clMaskNDVI.tif"
    cmd="gdal_calc.py -A %s -B %s --outfile=%s --calc='A*(B>0)' " %(img, cloudRast, rMaskPath)
    print(cmd)
    os.system(cmd)
    #p.runalg("gdalogr:rastercalculator", img, "1", cloudRast, "1", None, "1", None, "1", None, "1", None, "1", "A*((-1*B)+1)", "", 5, "", rMaskPath)
    #TODO add gdal calc expression
    y=datetime.datetime.now()
    c=y-x
    d=divmod(c.days*86400+c.seconds, 60)
    print "finished cloud masking in %f minutes and %f seconds" %(d[0], d[1])
    if not QgsRasterLayer(rMaskPath).isValid():
        print "problem masking image %s function 8 clMask" %baseName
        raise SystemExit
    else:
        print "output of function 8 clMask is:\n %s" %rMaskPath
        return rMaskPath
Example #4
def update_geojson(in_gpx, target_geojson):
    # Convert GPX to a temporary geojson file - will be appended in second step to target geojson
    # (ogr2ogr expects the destination dataset before the source, followed by the layer name)
    ogr2ogr.main([
        "", "-f", "GeoJSON", "tmp.geoJSON", in_gpx, "tracks",
        "-fieldTypeToString", "DateTime"
    ])
    # placeholder
    ogrmerge.process([""])
def transform(outFile, inFile, engine="ESRI Shapefile"):
    """Transform a file using ogr2ogr."""
    # from: https://gis.stackexchange.com/questions/39080/
    #       how-do-i-use-ogr2ogr-to-convert-a-gml-to-shapefile-in-python/
    #       41637#41637
    # note: main is expecting sys.argv, where the first argument is the script
    # name so, the argument indices in the array need to be offset by 1
    ogr2ogr.main(["", "-f", engine, outFile, inFile])
Example #6
def main():
    #note: main is expecting sys.argv, where the first argument is the script name
    #so, the argument indices in the array need to be offset by 1
    ogr2ogr.main([
        "", "-f", "KML", "out.kml",
        "data/san_andres_y_providencia_administrative.shp"
    ])

main()
def project_to_input(infeatures, outputFile2, gridpolys):
    shp = driver.Open(infeatures, 0)  # Opening the file with GDAL, with read-only access
    if shp is None:
        print "Open failed.\n"
        return None
    lyr = shp.GetLayer()
    spatialref = lyr.GetSpatialRef().ExportToWkt()
    # note: main is expecting sys.argv, where the first argument is the script name,
    # so the argument indices in the array need to be offset by 1
    ogr2ogr.main(["", "-f", "ESRI Shapefile", "-t_srs", spatialref, outputFile2, gridpolys])
    shp = lyr = spatialref = None
    return outputFile2
Example #8
def isValid(filePath):
    '''
    Checks whether it is valid GML or not. \n
    input "filePath": type string, path to file which shall be checked \n
    output true if file is valid, false if not
    '''
    try:
        ogr2ogr.main(["","-f", "GeoJSON", "outputV.json", filePath])
        myGeojson = pygeoj.load(filepath="outputV.json")
        return True
    except:
        raise Exception('The gml file from ' + filePath + ' has no valid gml Attributes')
Example #9
def getBoundingBox(filePath):
    '''         
    extract bounding box from gml \n
    input "filepath": type string, file path to gml file \n
    returns bounding box of the file: type list, length = 4 , type = float, schema = [min(longs), min(lats), max(longs), max(lats)]
    '''
    ogr2ogr.main(["","-f", "GeoJSON", "outputB.json", filePath])
    myGeojson = pygeoj.load(filepath="outputB.json")
    os.remove("outputB.json")
    if myGeojson.bbox is not None:    
        return (myGeojson.bbox)
    else:
        raise Exception('The gml file from ' + filePath + ' has no BoundingBox')
Example #10
def getVectorRepresentation(filePath):
    '''
    extracts coordinates from gml File (for vector representation) \n
    input "filepath": type string, file path to gml file \n
    returns extracted coordinates of content: type list, list of lists with length = 2
    '''
    ogr2ogr.main(["","-f", "GeoJSON", "outputV.json", filePath])
    myGeojson = pygeoj.load(filepath="outputV.json")
    properties= (myGeojson.get_feature(0).geometry.coordinates[0])
    os.remove("outputV.json")
    if properties is None:
        raise Exception('The gml file from ' + filePath + ' has no VectorRepresentation')
    else:
        return properties
 def ISOCase(filepath):
     try:
         # @see https://gis.stackexchange.com/questions/39080/using-ogr2ogr-to-convert-gml-to-shapefile-in-python
         # convert the gml file to a GeoJSON file
         with tempfile.TemporaryDirectory() as tmpdirname:
             curDir = os.getcwd()
             os.chdir(tmpdirname)
             ogr2ogr.main(["", "-f", "GeoJSON", "output.json", filepath])
             res = getPolygon("output.json", tmpdirname)
             os.chdir(curDir)
         return res
     # errors
     except:
         return (None,
                 "file not found or your gml/xml/kml data is not valid")
def project_to_input(infeatures, outputFile2, gridpolys):
    shp = driver.Open(infeatures,
                      0)  # Opening the file with GDAL, with read-only access
    if shp is None:
        print "Open failed.\n"
        return None
    lyr = shp.GetLayer()
    spatialref = lyr.GetSpatialRef().ExportToWkt()
    # note: main is expecting sys.argv, where the first argument is the script name,
    # so the argument indices in the array need to be offset by 1
    ogr2ogr.main(
        [
            "", "-f", "ESRI Shapefile", "-t_srs", spatialref, outputFile2,
            gridpolys
        ]
    )
    shp = lyr = spatialref = None
    return outputFile2
Example #13
    def __clip_shapefile(self, file):
        """ Create a new shapefile with rows and paths added to it """
        print "Clipping the shapefile: %s" % get_file(file)

        clipper = '%s/%s' % (self.shapefile_output, get_file(file))
        output = '%s/landsat-tiles.shp' % self.shapefile_output
        input = '%s/wrs2_descending/wrs2_descending.shp' % self.assests_dir
        argv = ['', '-clipsrc', clipper, output, input]

        if os.path.isfile(output):
            argv.insert(1, '-overwrite')

        ogr2ogr.main(argv)

        return True
Example #14
def to_CSV(screenOn, screen, filein, fileout):
  """
  Note: main is expecting sys.argv, where the first argument is the script name
  so, the argument indices in the array need to be offset by 1

  This can be used to prune larger shapefiles into smaller files 

  ======================
  ogr2ogr -f CSV  precip_20140107.csv  nws_precip_1day_observed_20140107.shp -lco GEOMETRY=AS_XYZ
  """
  if(screenOn):
    query = ["", "-f", "CSV", "-where", screen, fileout, filein,  "-lco", "GEOMETRY=AS_XYZ","-skipfailures", "-overwrite"]
  else:
    query = ["", "-f", "CSV", fileout, filein, "-lco", "GEOMETRY=AS_XYZ"]
  ogr2ogr.main(query)
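A hypothetical call mirroring the CLI example in the docstring (the attribute filter "GLOBVALUE > 0" is an assumption, not taken from the original):

to_CSV(True, "GLOBVALUE > 0", "nws_precip_1day_observed_20140107.shp", "precip_20140107.csv")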
Example #15
def convertShpToJson(shapefiles, out_folder, where=None):
    result = (True, [])
    converted = result[1]
    params = ["", "-f", "geojson", "-t_srs", "epsg:3857"]

    if where:
        params.append("-where")
        params.append(where)

    try:
        for shp in shapefiles[1]:
            in_file = os.path.join(shapefiles[0], shp)

            #Remove any . characters from the output name
            out_file = os.path.join(
                out_folder, "{0}.{1}".format(
                    os.path.splitext(shp)[0].replace(".", ""), "json"))
            logging.debug("Converting {0} to {1}".format(in_file, out_file))
            args = params + [out_file, in_file]
            conversion = ogr2ogr.main(args)

            if not conversion:
                result = (False,
                          "SHP to GEOJSON conversion failed: {0}".format(shp))
                break
            else:
                converted.append(os.path.split(out_file)[1])

    except Exception as e:
        msg = str(e).replace("\n", " ")
        result = (False, msg)

    return result
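A hypothetical call (paths and file names are placeholders): the first argument is a (directory, [file names]) pair, the second the output folder.

ok, info = convertShpToJson(("/data/shapefiles", ["roads.shp", "parcels.shp"]), "/data/geojson")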
Example #16
def to_shp(screenOn, screen, filein, fileout):
  """
  Note: main is expecting sys.argv, where the first argument is the script name
  so, the argument indices in the array need to be offset by 1

  This can be used to prune larger shapefiles into smaller files

  ======================
  EXAMPLE:
  to_shp(True, "state='California'", "ne_10m_roads_north_america.shp", "CA_roads_10m.shp")
  """
  if(screenOn):
    query = ["", "-f", "ESRI Shapefile", "-where", screen, fileout, filein]
  else:
    query = ["", "-f", "ESRI Shapefile", fileout, filein]
  ogr2ogr.main(query)
def _load_tab_resources(tab_res, table_name):
    url = tab_res['url'].replace('https', 'http')
    logger.debug("using TAB file " + url)
    filepath, headers = urllib.urlretrieve(url, "input.zip")
    logger.debug("TAB archive downloaded")

    subprocess.call(['unzip', '-j', filepath])
    logger.debug("TAB unzipped")

    tabfiles = glob.glob("*.[tT][aA][bB]")
    if len(tabfiles) == 0:
        _failure("No tab files found in zip " + tab_res['url'])

    tab_file = tabfiles[0]

    native_crs = 'EPSG:4326'

    pargs = [
        '', '-f', 'PostgreSQL', "--config", "PG_USE_COPY", "YES",
        _get_db_param_string(_get_db_settings()), tab_file, '-nln', table_name,
        '-lco', 'GEOMETRY_NAME=geom', "-lco", "PRECISION=NO", '-t_srs',
        native_crs, '-nlt', 'PROMOTE_TO_MULTI', '-overwrite'
    ]

    res = ogr2ogr.main(pargs)
    if not res:
        _failure("Ogr2ogr: Failed to convert file to PostGIS")

    return native_crs
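The _get_db_settings and _get_db_param_string helpers are not included in this snippet; a minimal sketch, assuming they build the PG: datasource string that ogr2ogr expects (all names and values below are assumptions):

def _get_db_settings():
    # hypothetical helper: connection settings, e.g. read from configuration or environment
    return {'host': 'localhost', 'dbname': 'gis', 'user': 'gis', 'password': 'gis'}

def _get_db_param_string(settings):
    # hypothetical helper: format the PostgreSQL datasource string for ogr2ogr
    return "PG:host={host} dbname={dbname} user={user} password={password}".format(**settings)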
def import_geojson(tbname, src):
    log_info("Importing geojson data: ")
    log_info("  " + src)

    log_info("  Loading geojson data into the " + tbname + " table")
    ogr2ogr.main([
        "", "-f", "PostgreSQL", "PG:" + pgconfig.connstr, src, "-nln",
        pgconfig.schema + "." + tbname, "-lco", "GEOMETRY_NAME=geom",
        "-overwrite"
    ])  # "-nlt", "PROMOTE_TO_MULTI"

    log_info("  Updating hazard_events table")
    sql = read_file(tbname + ".sql")
    execute_sql(sql)

    log_info("Import succeeded.")
Example #19
def convertShpToJson(shapefiles, out_folder, where=None):
    result = (True, [])
    converted = result[1]
    params = ["", "-f", "geojson", "-t_srs", "epsg:3857"]
    
    if where:
        params.append("-where")
        params.append(where)

    try:
        for shp in shapefiles[1]:
            in_file = os.path.join(shapefiles[0], shp)
            
            #Remove any . characters from the output name
            out_file = os.path.join(out_folder, "{0}.{1}".format(os.path.splitext(shp)[0].replace(".", ""), "json"))
            logging.debug("Converting {0} to {1}".format(in_file, out_file))
            args = params + [out_file, in_file]
            conversion = ogr2ogr.main(args)
            
            if not conversion:
                result = (False, "SHP to GEOJSON conversion failed: {0}".format(shp))
                break
            else:
                converted.append(os.path.split(out_file)[1])

    except Exception as e:
        msg = str(e).replace("\n", " ")
        result = (False, msg)

    return result
Example #20
    def getCloudMask(self, raw_path, out_path):
        """
        Placeholder
        """

        mask_pathname = None

        # find gml
        gml = getFile(raw_path, '*CLOUDS*.gml')
        if gml is not None:

            # projection not parsed correctly from l2a gml - retrieve from scene raster
            scene = getFile(raw_path, '*B02_10m.jp2')
            if scene is not None:
                epsg = getEpsgCode(scene)

                # generate shape file using raster epsg
                shp = os.path.join(out_path, 'clouds.shp')
                if ogr2ogr.main([
                        "", "-f", "ESRI Shapefile", "-a_srs", 'EPSG:' + epsg,
                        shp, gml
                ]) is True:
                    mask_pathname = shp

        return mask_pathname
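The getFile and getEpsgCode helpers are not shown in this snippet; a minimal sketch, assuming getFile does a recursive wildcard search and getEpsgCode reads the raster's CRS via GDAL/OSR (both are assumptions about the original helpers):

import fnmatch
import os

from osgeo import gdal, osr

def getFile(root, pattern):
    # hypothetical helper: first file under root whose name matches the glob pattern, else None
    for dirpath, _, filenames in os.walk(root):
        for name in fnmatch.filter(filenames, pattern):
            return os.path.join(dirpath, name)
    return None

def getEpsgCode(raster_path):
    # hypothetical helper: EPSG code of the raster's projection, returned as a string
    ds = gdal.Open(raster_path)
    srs = osr.SpatialReference(wkt=ds.GetProjection())
    srs.AutoIdentifyEPSG()
    return srs.GetAuthorityCode(None)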
Example #21
 def ISOCase(filepath):
     """Method for extracting the convex hull of an ISO19xxx standardized file
     @param filepath Full path to ISO19xxx standardized file
     @returns a tuple where in first place is the convex hull as an array of point tuples
     """
     try:
         # @see https://gis.stackexchange.com/questions/39080/using-ogr2ogr-to-convert-gml-to-shapefile-in-python
         # convert the gml file to a GeoJSON file
         with tempfile.TemporaryDirectory() as tmpdirname:
             curDir = os.getcwd()
             os.chdir(tmpdirname)
             ogr2ogr.main(["", "-f", "GeoJSON", "output.json", filepath])
             res = ogr2ogrCase("output.json")
             os.chdir(curDir)
         return res
     # errors
     except:
         return (None,
                 "file not found or your gml/xml/kml data is not valid")
def main(args):
    if (len(args) == 3):
        gmlFile=args[1]
        shpFile=args[2]
    else:
        print "./gml2shp.py <gml_file> <shape_file>\n"
        sys.exit(0)
    #note: main is expecting sys.argv, where the first argument is the script name
    #so, the argument indices in the array need to be offset by 1

    #Example using ogr2ogr
    #ogr2ogr.main(["","-f", "KML", "out.kml", "data/san_andres_y_providencia_administrative.shp"])

    #GML to multiple shape files
    #ogr2ogr -f "ESRI Shapefile" polygon.shp multipolygon.gml
    #GPS
    #ogr2ogr.main(["","-t_srs", "EPSG:4326", "-f", "ESRI Shapefile", "bgt_tunnelpart.shp", "bgt_tunnelpart.gml"])

    #lat and long
    #ogr2ogr.main(["","-t_srs" , "EPSG:28992", "-f", "ESRI Shapefile", "bgt_tunnelpart.shp", "bgt_tunnelpart.gml"])
    ogr2ogr.main(["","-t_srs" , "EPSG:28992", "-f", "ESRI Shapefile", shpFile, gmlFile])
Example #23
    def __srs_adjustment(self, file, load='a', type='EPSG:4326'):
        """ Run SRS adjustments

        Attributes:
            file - full path to the shapefile
            load - load key, consult ogr2ogr documentation
            type - type key, consult ogr2ogr documentation
        """

        print "Executing SRS adjustments"

        output = '%s/%s' % (self.shapefile_output, get_file(file))
        argv = ['', '-%s_srs' % load, type, os.path.dirname(output), file]

        if os.path.isfile(output):
            input = output
            argv.insert(1, '-overwrite')

        ogr2ogr.main(argv)

        return True
    def ISOCase(filepath):
        """Method for extracting the boundingbox of an ISO19xxx standardized file

        @param filepath Full path to the ISO19xxx standardized file
        @returns a boundingbox as an array in a tuple in WGS84, formatted like ([minLong, minLat, maxLong, maxLat], None)
        """
        try:
            # @see https://gis.stackexchange.com/questions/39080/using-ogr2ogr-to-convert-gml-to-shapefile-in-python
            # convert the gml file to a GeoJSON file
            with tempfile.TemporaryDirectory() as tmpdirname:
                curDir = os.getcwd()
                os.chdir(tmpdirname)
                ogr2ogr.main(["", "-f", "GeoJSON", "output.json", filepath])
                # get boundingbox from generated GeoJSON file
                myGeojson = pygeoj.load(filepath="output.json")
                os.chdir(curDir)
            # delete generated GeoJSON file
            return (myGeojson.bbox, None)
        # errors
        except:
            return (None, "file not found or your gml/xml/kml data is not valid")
Example #25
def kmltogeojson(data):
    out = StringIO.StringIO()

    temp = tempfile.NamedTemporaryFile()
    temp.write(data.read())
    temp.flush()

    outname = temp.name + '.geojson'

    result = ogr2ogr.main(["", "-f", "GeoJSON", outname, temp.name])
    if result == True:
        return open(outname).read()
def main(args):
    if (len(args) == 3):
        gmlFile = args[1]
        shpFile = args[2]
    else:
        print "./gml2shp.py <gml_file> <shape_file>\n"
        sys.exit(0)
    #note: main is expecting sys.argv, where the first argument is the script name
    #so, the argument indices in the array need to be offset by 1

    #Example using ogr2ogr
    #ogr2ogr.main(["","-f", "KML", "out.kml", "data/san_andres_y_providencia_administrative.shp"])

    #GML to multiple shape files
    #ogr2ogr -f "ESRI Shapefile" polygon.shp multipolygon.gml
    #GPS
    #ogr2ogr.main(["","-t_srs", "EPSG:4326", "-f", "ESRI Shapefile", "bgt_tunnelpart.shp", "bgt_tunnelpart.gml"])

    #lat and long
    #ogr2ogr.main(["","-t_srs" , "EPSG:28992", "-f", "ESRI Shapefile", "bgt_tunnelpart.shp", "bgt_tunnelpart.gml"])
    ogr2ogr.main(
        ["", "-t_srs", "EPSG:28992", "-f", "ESRI Shapefile", shpFile, gmlFile])
    def ISOCase(filepath):
        """Method for extracting the boundingbox of an ISO19xxx standardized file

        @param filepath Full path to the ISO19xxx standardized file
        @returns a boundingbox as an array in a tuple in WGS84, formatted like ([minLong, minLat, maxLong, maxLat], None)
        @see https://gis.stackexchange.com/questions/39080/using-ogr2ogr-to-convert-gml-to-shapefile-in-python
        """
        try:  # in case GDAL works
            ogr.UseExceptions()
            isofile = ogr.CreateGeometryFromGML(filepath)
            crs = isofile.GetSpatialRef()
            if crs.IsProjected() == 1:
                crs = int(crs.GetAttrValue("PROJCS|AUTHORITY", 1))
            elif crs.IsGeographic() == 1:
                crs = int(crs.GetAttrValue("GEOGCS|AUTHORITY", 1))
            else:
                return (None, "CRS is missing!")
            bbox = isofile.GetEnvelope()
            result = CRSTransform(bbox[1], bbox[0], crs)
            result.extend(CRSTransform(bbox[3], bbox[2], crs))
            ogr.DontUseExceptions()
            return (result, None)
        except:
            try:
                # convert the gml file to a GeoJSON file
                with tempfile.TemporaryDirectory() as tmpdirname:
                    curDir = os.getcwd()
                    os.chdir(tmpdirname)
                    ogr2ogr.main(
                        ["", "-f", "GeoJSON", "output.json", filepath])
                    # get boundingbox from generated GeoJSON file
                    result = ogr2ogrCase("output.json")
                    os.chdir(curDir)
                # delete generated GeoJSON file
                return result
            # errors
            except:
                return (None,
                        "file not found or your gml/xml/kml data is not valid")
Example #28
def getTemporalExtent(filePath):
    '''
    extracts temporal extent of the gml \n
    input "filepath": type string, file path to gml file \n
    returns temporal extent of the file: type list, length = 2, both entries have the type dateTime, temporalExtent[0] <= temporalExtent[1]
    '''
    dateArray= []
    ogr2ogr.main(["","-f", "GeoJSON", "outputT.json", filePath])
    myGeojson = pygeoj.load(filepath="outputT.json")
    properties= (myGeojson.get_feature(0).properties)
    for key, value in properties.items():     
            if key=="beginLifespanVersion" or key=="date" or key=="endLifespanVersion" or key=="Start_Date" or key=="End_Date":
                dateArray.append(value)
            else:
                pass
    temporal_extent= []
    os.remove("outputT.json")
    if(len(dateArray) > 0):
        temporal_extent.append(min(dateArray))
        temporal_extent.append(max(dateArray))
        return temporal_extent
    else: 
        raise Exception('The gml file from ' + filePath + ' has no TemporalExtent') 
Example #29
    def __extract_country(self, name):
        """ Create a new country shapefile with rows and paths

        Attributes:
            name - name of the country shapefile name e.g. country.shp
        """

        print "Extracting the country: %s" % name

        input = '%s/ne_50m_admin_0_countries/ne_50m_admin_0_countries.shp' % \
                self.assests_dir
        output = '%s/country.shp' % self.shapefile_output
        argv = [
            '', '-where',
            'admin like "%s" or adm0_a3 like "%s"' % (name, name), output,
            input
        ]

        if os.path.isfile(output):
            argv.insert(1, '-overwrite')

        ogr2ogr.main(argv)

        return True
Example #30
def ogr2ogr_exec(argstring):
    """
    Run an ogr2ogr command
    :param argstring: command line arguments as string
    :return: success or failure
    """
    args = ["", ]
    args.extend(split_args(argstring))
    old_stdout = sys.stdout
    result = StringIO()
    sys.stdout = result
    try:
        foo = ogr2ogr.main(args)
        if not foo:
            raise Exception(result.getvalue())
    finally:
        if old_stdout:
            sys.stdout = old_stdout
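split_args is not defined in this snippet; a plausible implementation plus a hypothetical invocation (the file names are placeholders):

import shlex

def split_args(argstring):
    # hypothetical helper: shell-style tokenization of the argument string
    return shlex.split(argstring)

ogr2ogr_exec("-f GeoJSON output.geojson input.shp -skipfailures")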
Example #31
def _upload2pg(dir, schema, epsg_code):
    """Tries to upload the shapefile to postgres. If something went wrong
    it returns the problem, returns True otherwise"""
    db_conf = settings.DATABASES['default']
    #maybe better to use a system with ogr2ogr?
    args = [
        'ogr2ogr.py', '-f', 'PostgreSQL',
        'PG:host={0} user={1} dbname={2} password={3} active_schema={4}\
        '.format(db_conf['HOST'], db_conf['USER'], db_conf['NAME'],
                 db_conf['PASSWORD'], schema), '-a_srs',
        'EPSG:' + str(epsg_code), '-lco', 'PRECISION=NO', dir
    ]
    try:
        return ogr2ogr.main(args)
    except Exception, e:
        #the module ogr2ogr doesn't specify which exception could be
        #raised, try the generic one
        return e.message
Example #32
def _upload2pg(dir, schema, epsg_code):
    """Tries to upload the shapefile to postgres. If something went wrong
    it returns the problem, returns True otherwise"""
    db_conf = settings.DATABASES['default']
    #maybe better to use a system with ogr2ogr?
    args = ['ogr2ogr.py',
        '-f', 'PostgreSQL',
        'PG:host={0} user={1} dbname={2} password={3} active_schema={4}\
        '.format(db_conf['HOST'],db_conf['USER'],db_conf['NAME'],
                 db_conf['PASSWORD'],schema),
        '-a_srs', 'EPSG:'+str(epsg_code),
        '-lco', 'PRECISION=NO',
        dir]
    try:
        return ogr2ogr.main(args)
    except Exception, e:
        #the module ogr2ogr doesn't specify which exception could be
        #raised, try the generic one
        return e.message
Example #33
def ogr2ogr_exec(argstring):
    """
    Run an ogr2ogr command
    :param argstring: command line arguments as string
    :return: success or failure
    """
    args = [
        "",
    ]
    args.extend(split_args(argstring))
    old_stdout = sys.stdout
    result = StringIO()
    sys.stdout = result
    try:
        foo = ogr2ogr.main(args)
        if not foo:
            raise Exception(result.getvalue())
    finally:
        if old_stdout:
            sys.stdout = old_stdout
Example #34
import ogr2ogr
import os


import_directory = "toImport/"
output_directory = "../src/data"

for filename in os.listdir(import_directory):
	# only convert shapefiles; build the input/output paths from the directories above
	if filename.endswith(".shp"):
		base = os.path.splitext(filename)[0]
		ogr2ogr.main(["", "-t_srs", "EPSG:4326", "-f", "GeoJSON", os.path.join(output_directory, base + ".json"), os.path.join(import_directory, filename)])


Example #35
    def readInputFile(self):
        def checkDirExist(dir):
            if not os.path.exists(dir):
                os.makedirs(dir)

        checkDirExist(".\\input")
        checkDirExist(".\\output")
        try:
            # read the input files
            for i, files in enumerate(self.read_files):
                if files != "":
                    # reset options
                    self.options = {}
                    # split the file names of the current type
                    fs = files.split(',')
                    for f in fs:
                        # strip whitespace
                        f = f.strip()
                        self.file_set.append("{}.{}".format(f, self.ext[i]))
                        '''dataframe preprocessing'''
                        # shp file
                        if self.ext[i] == 'shp':
                            pass
                        # csv file
                        elif self.ext[i] == 'csv':
                            # enable the specified fiona driver
                            # see: https://gdal.org/drivers/vector/index.html
                            supported_drivers['CSV'] = 'rw'
                            # specify the column names holding longitude/latitude
                            # see: https://gdal.org/drivers/vector/csv.html#reading-csv-containing-spatial-information
                            self.options['X_POSSIBLE_NAMES'] = 'Lon'
                            self.options['Y_POSSIBLE_NAMES'] = 'Lat'
                        # kml file
                        elif self.ext[i] == 'kml':
                            # convert kml to shp; the shp file is stored under the "input\\KML\\<current file name>" folder
                            out_dir = ".\\input\\KML\\{}".format(f)  # folder path
                            out_path = "{}\\{}.shp".format(out_dir, f)  # file path
                            checkDirExist(out_dir)
                            ogr2ogr.main(["", "-f", "ESRI Shapefile", out_path, ".\\input\\{}.kml".format(f),
                                          "-dim", "2",
                                          "-lco", "ENCODING=UTF-8"])
                            # supported_drivers['libkml'] = 'rw'
                            # supported_drivers['LIBKML'] = 'rw'
                            # self.options['driver']='KML'
                        '''read as a dataframe'''
                        print("\nReading file: {}.{}".format(f, self.ext[i]))
                        if self.ext[i] == 'kml':
                            df = gp.read_file(out_path, encoding='utf-8')
                        elif self.ext[i] == 'asc':
                            res, err, geom = self.asc.getGeometry(f, self.temp_dir)
                            if not res:
                                self.err_msg += err
                                return False, self.err_msg, self.file_set, self.df_set, self.color_set
                            else:
                                df = gp.GeoDataFrame.from_features(geom)
                                print(df.head())
                                self.df_set.append(df)
                            continue
                        else:
                            df = gp.read_file(".\\input\\{}.{}".format(f, self.ext[i]), encoding='utf-8',
                                              **self.options)

                        # {'init': 'epsg:4326'} causes an x/y axis swap error
                        # see: https://github.com/pyproj4/pyproj/issues/355
                        df.crs = 'EPSG:4326'  # default to WGS84 in case the input does not specify a CRS
                        print("Original CRS: {}".format(df.crs))
                        # reproject coordinates
                        df = df.to_crs(epsg=4326)
                        print("Reprojected to: {}".format(df.crs))

                        # keep only the geometry column
                        # search = u"臺北市"
                        # df = df[df['COUNTYNAME'].isin(["臺北市"])]
                        print(df.head())
                        # df = df[['COUNTYNAME','geometry']]
                        df = df[['geometry']]
                        df.reset_index(drop=True)
                        self.df_set.append(df)
                i += 1
            return True, self.err_msg, self.file_set, self.df_set, self.color_set
        except DriverError:
            self.err_msg += "Unable to read file: {}!".format(f)
            return False, self.err_msg, self.file_set, self.df_set, self.color_set
#get the list of shapefiles in cwd
shapelist = getshp(cwd)

#code not used yet, but it could at some
#point be useful to pass layer objects
#open the files as layer objects
#layerlist = openlist(shapelist)

#make a dict from the filenames and the layer objects
#layerdict = dict(zip(shapelist, layerlist))

#add fields and values from filename
for layer in shapelist:
	valuedict = splitname(layer, fieldnames)
	addfields(layer, valuedict)
	addfeaturearea(layer, 'Area')
	addfieldpercent(layer,'Area', 'Percent')

print("Fields and Values added")

#merge files into a single .shp file using method in vector.py
mergeSpatialFiles(shapelist, 'mergedshp.shp', 'ESRI Shapefile')

print("Shapefiles merged")

#convert merged .shp file into a .geojson file
ogr2ogr.main(["","-f", "GeoJSON", "out.geojson", "mergedshp.shp"])


print("Export to geojson successful")
def main(argv):
    #Parse options
    parser = OptionParser()
    parser.add_option("-p", "--polygoninput", dest="polyInput", default="./tmp/grid.shp",
                        help="Input polygon SHP. Default is a grid measured by -l and -r", metavar="FILE")
    parser.add_option("-i", "--pointinput", dest="pointInput",
                        help="Input csv file path", metavar="FILE")
    parser.add_option("-x", "--longitude", dest="longitude", default='longitude',
                        help="CSV longitude header. Default is 'longitude'")
    parser.add_option("-y", "--latitude", dest="latitude", default='latitude',
                        help="CSV latitude header. Default is 'latitude'")
    parser.add_option("-z", "--zfield", dest="zfield", default=False,
                        help="CSV attribute header to aggregate on. Default is raw count")
    parser.add_option("-l", "--cols", dest="cols", default=0.5,
                        help="Fraction of longitude for grid if -p is not supplied. Default is 0.5")
    parser.add_option("-r", "--rows", dest="rows", default=0.5,
                        help="Fraction of latitude for grid if -p is not supplied. Default is 0.5")
    (options, args) = parser.parse_args()
    basename = os.path.basename(options.pointInput)
    inputname, inputextension = os.path.splitext(basename)
    #Clean up
    try:
      shutil.rmtree("./tmp")
    except:
      print "No cleanup required... Continuing..."
    #Write DBF
    os.makedirs("./tmp")
    ogr2ogr.main(["","-f","ESRI Shapefile","./tmp",options.pointInput])
    #Write VRT
    print "Writing CSV VRT..."
    vrt = open('./tmp/'+inputname+'.vrt','w')
    vrt.write("<OGRVRTDataSource>\n")
    vrt.write("\t<OGRVRTLayer name='"+inputname+"'>\n")
    vrt.write("\t\t<SrcDataSource relativeToVRT='1'>./</SrcDataSource>\n")
    vrt.write("\t\t<GeometryType>wkbPoint</GeometryType>\n")
    vrt.write("\t\t<LayerSRS>WGS84</LayerSRS>\n")
    vrt.write("\t\t<GeometryField encoding='PointFromColumns' x='"+options.longitude+"' y='"+options.latitude+"'/>\n")
    vrt.write("\t</OGRVRTLayer>\n")
    vrt.write("</OGRVRTDataSource>")
    vrt.close()
    #Write SHP
    print "Converting to SHP..."
    ogr2ogr.main(["","-f","ESRI Shapefile","./tmp","./tmp/"+inputname+".vrt","-overwrite"])
    
    #Convex hull
    # Get a Layer
    print "Calculating convex hull..."
    inConShapefile = "./tmp/"+inputname+".shp"
    inDriver = ogr.GetDriverByName("ESRI Shapefile")
    inConDataSource = inDriver.Open(inConShapefile, 0)
    inConLayer = inConDataSource.GetLayer()
    
    # Collect all Geometry
    geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
    for feature in inConLayer:
       geomcol.AddGeometry(feature.GetGeometryRef())
    
    # Calculate convex hull
    convexhull = geomcol.ConvexHull()
    
    # Save extent to a new Shapefile
    outConShapefile = "./tmp/convexhull.shp"
    outDriver = ogr.GetDriverByName("ESRI Shapefile")
    
    # Remove output shapefile if it already exists
    if os.path.exists(outConShapefile):
       outDriver.DeleteDataSource(outConShapefile)
    
    # Create the output shapefile
    outConDataSource = outDriver.CreateDataSource(outConShapefile)
    outConLayer = outConDataSource.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)
    
    # Add an ID field
    idField = ogr.FieldDefn("id", ogr.OFTInteger)
    outConLayer.CreateField(idField)
    
    # Create the feature and set values
    featureDefn = outConLayer.GetLayerDefn()
    feature = ogr.Feature(featureDefn)
    feature.SetGeometry(convexhull)
    feature.SetField("id", 1)
    outConLayer.CreateFeature(feature)
    
    # Close DataSource
    inConDataSource.Destroy()
    
    #Create grid
    print "Creating grid..."
    #Get extent
    xmin, xmax, ymin, ymax = outConLayer.GetExtent()
    # convert extent values to float
    xmin = float(xmin)
    xmax = float(xmax)
    ymin = float(ymin)
    ymax = float(ymax)
    gridWidth = float(options.cols)
    gridHeight = float(options.rows)

    # get rows
    rows = ceil((ymax-ymin)/gridHeight)
    # get columns
    cols = ceil((xmax-xmin)/gridWidth)

    # start grid cell envelope
    ringXleftOrigin = xmin
    ringXrightOrigin = xmin + gridWidth
    ringYtopOrigin = ymax
    ringYbottomOrigin = ymax-gridHeight

    # create output file
    outDriver = ogr.GetDriverByName('ESRI Shapefile')
    if os.path.exists('./tmp/grid.shp'):
        os.remove('./tmp/grid.shp')
    outDataSource = outDriver.CreateDataSource('./tmp/grid.shp')
    outLayer = outDataSource.CreateLayer('./tmp/grid.shp',geom_type=ogr.wkbPolygon )
    featureDefn = outLayer.GetLayerDefn()

    # create grid cells
    countcols = 0
    while countcols < cols:
        countcols += 1

        # reset envelope for rows
        ringYtop = ringYtopOrigin
        ringYbottom =ringYbottomOrigin
        countrows = 0

        while countrows < rows:
            countrows += 1
            ring = ogr.Geometry(ogr.wkbLinearRing)
            ring.AddPoint(ringXleftOrigin, ringYtop)
            ring.AddPoint(ringXrightOrigin, ringYtop)
            ring.AddPoint(ringXrightOrigin, ringYbottom)
            ring.AddPoint(ringXleftOrigin, ringYbottom)
            ring.AddPoint(ringXleftOrigin, ringYtop)
            poly = ogr.Geometry(ogr.wkbPolygon)
            poly.AddGeometry(ring)

            # add new geom to layer
            outFeature = ogr.Feature(featureDefn)
            outFeature.SetGeometry(poly)
            outLayer.CreateFeature(outFeature)
            outFeature.Destroy()

            # new envelope for next poly
            ringYtop = ringYtop - gridHeight
            ringYbottom = ringYbottom - gridHeight

        # new envelope for next poly
        ringXleftOrigin = ringXleftOrigin + gridWidth
        ringXrightOrigin = ringXrightOrigin + gridWidth

    # Close DataSources
    outDataSource.Destroy()
    
    #Clip poly SHP by data convex hull
    print "Clipping polygon SHP by convex hull..."
    spatialReference = osr.SpatialReference()
    spatialReference.ImportFromProj4('+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
    polyShapefile = options.polyInput
    pointShapefile = "./tmp/"+inputname+".shp"
    driver = ogr.GetDriverByName("ESRI Shapefile")
    polyDataSource = driver.Open(polyShapefile, 0)
    polyLayer = polyDataSource.GetLayer()
    clipData = driver.CreateDataSource("./tmp")
    clipLayer = clipData.CreateLayer("clip",spatialReference,ogr.wkbPolygon)
    polyLayer.Clip(outConLayer,clipLayer)
    outConDataSource.Destroy()
    pointDataSource = driver.Open(pointShapefile, 0)
    pointLayer = pointDataSource.GetLayer()
    aField = ogr.FieldDefn("Area", ogr.OFTReal)
    cField = ogr.FieldDefn("Count", ogr.OFTReal)
    sField = ogr.FieldDefn("SumAttr", ogr.OFTReal)
    dField = ogr.FieldDefn("CountDens", ogr.OFTReal)
    tField = ogr.FieldDefn("AttrDens",ogr.OFTReal)
    clipLayer.CreateField(aField)
    clipLayer.CreateField(cField)
    clipLayer.CreateField(sField)
    clipLayer.CreateField(dField)
    clipLayer.CreateField(tField)
    
    #Iterate over SHP features
    print "Calculating point density..."
    for feature in clipLayer:
        geom = feature.GetGeometryRef()
        area = geom.Area()
        pointLayer.SetSpatialFilter(geom)
        count = pointLayer.GetFeatureCount()
        density = count/area
        print density
        sumattr=0
        if options.zfield and count>0:
            for point in pointLayer:
                attr = point.GetField(options.zfield)
                try:
                    sumattr+=float(attr)
                except:
                    print "Invalid attr"
            attrdensity=sumattr/area
        else:
            attrdensity=density
        feature.SetField("Area",area)
        feature.SetField("Count",count)
        feature.SetField("SumAttr",sumattr)
        feature.SetField("CountDens",density)
        feature.SetField("AttrDens",attrdensity)
        clipLayer.SetFeature(feature)
        
    # Close DataSource
    polyDataSource.Destroy()
    pointDataSource.Destroy()
Example #38
## from https://github.com/OSGeo/gdal/blob/master/gdal/swig/python/gdal-utils/osgeo_utils/samples

import ogr2ogr

shp = "C:\\Data\\Projects_GIS\\2020\\001_a_GEODB\\BRK_Mapping_Pluvial_IT_US\\pluvial_brk_302.shp"
out_gjson = "C:\\Temp\\dump\\output.geojson"

ogr2ogr.main([
  'ogr2ogr',
  '-f', 'GeoJSON', out_gjson,
  shp
])
Example #39
        conn = pymssql.connect(host='sql2014a8',
                               database='abm_13_2_3',
                               as_dict=True)
        cursor = conn.cursor()
        cursor.execute(
            'SELECT scenario_id, path, scenario_year FROM ref.scenario WHERE scenario_id = %s'
            % (scenario_id))
        row = cursor.fetchone()

        if row:
            print scenario_path
            ogr2ogr.main([
                'E:\\OSGeo4W64\\bin\\ogr2ogr.exe', '-f', 'ESRI Shapefile',
                '{0}hwy_load_{1}.shp'.format(scenario_path,
                                             row['scenario_id']),
                'MSSQL:server=sql2014a8;database=abm_13_2_3;trusted_connection=yes;',
                '-sql',
                'SELECT scenario_id as scen_id,scenario_year as scen_yr,abm_version as abm_ver,hwy_link_id as hwy_link,hwycov_id,GEOMETRY::STGeomFromWKB(shape.STAsBinary(), 4326) as shape,link_name,length_mile as len_mile,count_jur,count_stat,count_loc,ifc,ifc_desc,ihov,itruck,post_speed,iway,imed,from_node,from_nm,to_node,to_nm,COALESCE(total_flow,0) as total_flow,COALESCE(ab_tot_flow,0) as abTotFlow,COALESCE(ba_tot_flow,0) as baTotFlow,COALESCE(ab_tot_flow, 0) * length_mile as ab_vmt, COALESCE(ba_tot_flow, 0) * length_mile as ba_vmt,COALESCE(ab_tot_flow,0) * length_mile + COALESCE(ba_tot_flow, 0) * length_mile as vmt,COALESCE(ROUND(((ab_ea_min * ab_ea_flow) + (ab_am_min * ab_am_flow) + (ab_md_min * ab_md_flow) + (ab_pm_min * ab_pm_flow) + (ab_ev_min * ab_ev_flow)) / 60, 3), 0) as ab_vht,COALESCE(ROUND(((ba_ea_min * ba_ea_flow) + (ba_am_min * ba_am_flow) + (ba_md_min * ba_md_flow) + (ba_pm_min * ba_pm_flow) + (ba_ev_min * ba_ev_flow)) / 60, 3), 0) as ba_vht,COALESCE(ROUND(((ab_ea_min * ab_ea_flow) + (ab_am_min * ab_am_flow) + (ab_md_min * ab_md_flow) + (ab_pm_min * ab_pm_flow) + (ab_ev_min * ab_ev_flow) + (ba_ea_min * ba_ea_flow) + (ba_am_min * ba_am_flow) + (ba_md_min * ba_md_flow) + (ba_pm_min * ba_pm_flow) + (ba_ev_min * ba_ev_flow)) / 60, 3), 0) as vht,COALESCE(ab_ea_flow, 0) as ab_ea_flow,COALESCE(ba_ea_flow, 0) as ba_ea_flow,COALESCE(ab_am_flow, 0) as ab_am_flow,COALESCE(ba_am_flow, 0) as ba_am_flow,COALESCE(ab_md_flow, 0) as ab_md_flow,COALESCE(ba_md_flow, 0) as ba_md_flow,COALESCE(ab_pm_flow, 0) as ab_pm_flow,COALESCE(ba_pm_flow, 0) as ba_pm_flow,COALESCE(ab_ev_flow, 0) as ab_ev_flow,COALESCE(ba_ev_flow, 0) as ba_ev_flow,COALESCE(ab_auto_flow, 0) as abAutoFlow,COALESCE(ba_auto_flow, 0) as baAutoFlow,COALESCE(ab_sov_flow, 0) as abSovFlow,COALESCE(ba_sov_flow, 0) as baSovFlow,COALESCE(ab_hov2_flow, 0) as abHov2Flow,COALESCE(ba_hov2_flow, 0) as baHov2Flow,COALESCE(ab_hov3_flow, 0) as abHov3Flow,COALESCE(ba_hov3_flow, 0) as baHov3Flow,COALESCE(ab_truck_flow, 0) as abTrucFlow,COALESCE(ba_truck_flow, 0) as baTrucFlow,COALESCE(ab_bus_flow, 0) as abBusFlow,COALESCE(ba_bus_flow, 0) as baBusFlow,ab_ea_mph,ba_ea_mph,ab_am_mph,ba_am_mph,ab_md_mph,ba_md_mph,ab_pm_mph,ba_pm_mph,ab_ev_mph,ba_ev_mph,ab_ea_min,ba_ea_min,ab_am_min,ba_am_min,ab_md_min,ba_md_min,ab_pm_min,ba_pm_min,ab_ev_min,ba_ev_min,ab_ea_lane,ba_ea_lane,ab_am_lane,ba_am_lane,ab_md_lane,ba_md_lane,ab_pm_lane,ba_pm_lane,ab_ev_lane,ba_ev_lane,ab_ea_voc,ba_ea_voc,ab_am_voc,ba_am_voc,ab_md_voc,ba_md_voc,ab_pm_voc,ba_pm_voc,ab_ev_voc,ba_ev_voc FROM abm.fn_hwy_vol_by_mode_and_tod ({0})'
                .format(row['scenario_id']), '-overwrite', '-s_srs',
                'EPSG:4326', '-t_srs', 'EPSG:2230'
            ])
            #subprocess.check_call(os_command))
            #os_command = ['E:\\OSGeo4W64\\bin\\ogr2ogr.exe', '-f', 'ESRI Shapefile',
            #          '{0}hwy_load_{1}.shp'.format(scenario_path, row['scenario_id']),
            #          'MSSQL:server=sql2014a8;database=abm_13_2_3;trusted_connection=yes;', '-sql',
            #          'SELECT scenario_id as scen_id,scenario_year as scen_yr,abm_version as abm_ver,hwy_link_id as hwy_link,hwycov_id,GEOMETRY::STGeomFromWKB(shape.STAsBinary(), 4326) as shape,link_name,length_mile as len_mile,count_jur,count_stat,count_loc,ifc,ifc_desc,ihov,itruck,post_speed,iway,imed,from_node,from_nm,to_node,to_nm,COALESCE(total_flow,0) as total_flow,COALESCE(ab_tot_flow,0) as abTotFlow,COALESCE(ba_tot_flow,0) as baTotFlow,COALESCE(ab_tot_flow, 0) * length_mile as ab_vmt, COALESCE(ba_tot_flow, 0) * length_mile as ba_vmt,COALESCE(ab_tot_flow,0) * length_mile + COALESCE(ba_tot_flow, 0) * length_mile as vmt,COALESCE(ROUND(((ab_ea_min * ab_ea_flow) + (ab_am_min * ab_am_flow) + (ab_md_min * ab_md_flow) + (ab_pm_min * ab_pm_flow) + (ab_ev_min * ab_ev_flow)) / 60, 3), 0) as ab_vht,COALESCE(ROUND(((ba_ea_min * ba_ea_flow) + (ba_am_min * ba_am_flow) + (ba_md_min * ba_md_flow) + (ba_pm_min * ba_pm_flow) + (ba_ev_min * ba_ev_flow)) / 60, 3), 0) as ba_vht,COALESCE(ROUND(((ab_ea_min * ab_ea_flow) + (ab_am_min * ab_am_flow) + (ab_md_min * ab_md_flow) + (ab_pm_min * ab_pm_flow) + (ab_ev_min * ab_ev_flow) + (ba_ea_min * ba_ea_flow) + (ba_am_min * ba_am_flow) + (ba_md_min * ba_md_flow) + (ba_pm_min * ba_pm_flow) + (ba_ev_min * ba_ev_flow)) / 60, 3), 0) as vht,COALESCE(ab_ea_flow, 0) as ab_ea_flow,COALESCE(ba_ea_flow, 0) as ba_ea_flow,COALESCE(ab_am_flow, 0) as ab_am_flow,COALESCE(ba_am_flow, 0) as ba_am_flow,COALESCE(ab_md_flow, 0) as ab_md_flow,COALESCE(ba_md_flow, 0) as ba_md_flow,COALESCE(ab_pm_flow, 0) as ab_pm_flow,COALESCE(ba_pm_flow, 0) as ba_pm_flow,COALESCE(ab_ev_flow, 0) as ab_ev_flow,COALESCE(ba_ev_flow, 0) as ba_ev_flow,COALESCE(ab_auto_flow, 0) as abAutoFlow,COALESCE(ba_auto_flow, 0) as baAutoFlow,COALESCE(ab_sov_flow, 0) as abSovFlow,COALESCE(ba_sov_flow, 0) as baSovFlow,COALESCE(ab_hov2_flow, 0) as abHov2Flow,COALESCE(ba_hov2_flow, 0) as baHov2Flow,COALESCE(ab_hov3_flow, 0) as abHov3Flow,COALESCE(ba_hov3_flow, 0) as baHov3Flow,COALESCE(ab_truck_flow, 0) as abTrucFlow,COALESCE(ba_truck_flow, 0) as baTrucFlow,COALESCE(ab_bus_flow, 0) as abBusFlow,COALESCE(ba_bus_flow, 0) as baBusFlow,ab_ea_mph,ba_ea_mph,ab_am_mph,ba_am_mph,ab_md_mph,ba_md_mph,ab_pm_mph,ba_pm_mph,ab_ev_mph,ba_ev_mph,ab_ea_min,ba_ea_min,ab_am_min,ba_am_min,ab_md_min,ba_md_min,ab_pm_min,ba_pm_min,ab_ev_min,ba_ev_min,ab_ea_lane,ba_ea_lane,ab_am_lane,ba_am_lane,ab_md_lane,ba_md_lane,ab_pm_lane,ba_pm_lane,ab_ev_lane,ba_ev_lane,ab_ea_voc,ba_ea_voc,ab_am_voc,ba_am_voc,ab_md_voc,ba_md_voc,ab_pm_voc,ba_pm_voc,ab_ev_voc,ba_ev_voc FROM abm.fn_hwy_vol_by_mode_and_tod ({0})'.format(
            #              row['scenario_id']),
            #          '-overwrite', '-s_srs', 'EPSG:4326', '-t_srs', 'EPSG:2230']
            #subprocess.check_call(os_command)
        else:
            print 'Scenario Not Available: %s, %s' % (scenario_id,
                                                      scenario_path)
Example #40
def getBoundingBox(name, path):
    """returns the bounding Box of supported Datatypes and standards in WGS84.

    supported data: Shapefile (.shp), GeoJson (.json/.geojson), GeoTIFF (.tif), netCDF (.nc), GeoPackage (.gpkg), all ISO19xxx standardized formats, CSV on the web
    
    @param path Path to the file
    @param name name of the file with extension
    @returns a boundingbox as an array in a tuple in WGS84, formatted like ([minLong, minLat, maxLong, maxLat], None)
    """
    # connect name and path to file
    filepath = os.path.join(path, name)
    # get file extension
    filename, file_extension = os.path.splitext(filepath)
    print(file_extension)
    # shapefile handling
    if file_extension == ".shp":
        try:
            myshp = open(filepath, "rb")
            sf = shapefile.Reader(shp=myshp)
        # error
        except:
            return (None, "File Error!")
        else: # if no error occurred
            return (sf.bbox, None)


    # geojson handling
    elif file_extension == ".json" or file_extension == ".geojson":
        try:
            myGeojson = pygeoj.load(filepath=filepath)
            return (myGeojson.bbox, None)
        except ValueError: # if geojson is not a featureCollection
            myJson = open(filepath, "rb")
            myJson = json.load(myJson)

            # raw FeatureCollection
            myGeojson = {
                "type": "FeatureCollection",
                "features": []
            }

            myGeojson.get("features").append(myJson)
            myGeojson = pygeoj.load(data=myGeojson)
            return (myGeojson.bbox, None)
        # errors
        except:
            return (None, "File Error!")

    elif file_extension == ".tif" or file_extension == ".tiff":
        # @see https://stackoverflow.com/questions/2922532/obtain-latitude-and-longitude-from-a-geotiff-file
        try:
            # get the existing coordinate system
            ds = gdal.Open(filepath)
            old_cs= osr.SpatialReference()
            old_cs.ImportFromWkt(ds.GetProjectionRef())

            # create the new coordinate system
            wgs84_wkt = """
            GEOGCS["WGS 84",
                DATUM["WGS_1984",
                    SPHEROID["WGS 84",6378137,298.257223563,
                        AUTHORITY["EPSG","7030"]],
                    AUTHORITY["EPSG","6326"]],
                PRIMEM["Greenwich",0,
                    AUTHORITY["EPSG","8901"]],
                UNIT["degree",0.01745329251994328,
                    AUTHORITY["EPSG","9122"]],
                AUTHORITY["EPSG","4326"]]"""
            new_cs = osr.SpatialReference()
            new_cs .ImportFromWkt(wgs84_wkt)

            # create a transform object to convert between coordinate systems
            transform = osr.CoordinateTransformation(old_cs,new_cs) 

            #get the point to transform, pixel (0,0) in this case
            width = ds.RasterXSize
            height = ds.RasterYSize
            gt = ds.GetGeoTransform()
            minx = gt[0]
            miny = gt[3] + width*gt[4] + height*gt[5] 
            maxx = gt[0] + width*gt[1] + height*gt[2]
            maxy = gt[3] 

            #get the coordinates in lat long
            latlongmin = transform.TransformPoint(minx,miny)
            latlongmax = transform.TransformPoint(maxx,maxy)
            bbox = [latlongmin[0], latlongmin[1], latlongmax[0], latlongmax[1]]
            return (bbox, None)
        # errors
        except:
            return (None, "File Error or TIFF is not GeoTIFF")

    # netCDF handling
    elif file_extension == ".nc":
        try:
            # https://gis.stackexchange.com/questions/270165/gdal-to-acquire-netcdf-like-metadata-structure-in-python
            ds = xr.open_dataset(filepath)
            # transform coordinates section in a dictionary
            coordinates = ds.to_dict()['coords']
            # get the coordinates as a list
            lats = coordinates['latitude']['data']
            longs = coordinates['longitude']['data']

            # taking the smallest and highest coordinates from the lists
            bbox = [min(longs), min(lats), max(longs), max(lats)]
            return (bbox, None)
        # errors
        except KeyError:
            return (None, "coordinate names may be spelled wrong: should be 'latitude'/'longitude'")
        except:
            return (None, "File Error!")

    # geoPackage handling
    elif file_extension == ".gpkg":
        # @see https://stackoverflow.com/questions/35945437/python-gdal-projection-conversion-from-wgs84-to-nztm2000-is-not-correct
        try:
            conn = sqlite3.connect(filepath)
            c = conn.cursor()
            c.execute("""   SELECT min(min_x), min(min_y), max(max_x), max(max_y), srs_id
                            FROM gpkg_contents
                            WHERE NOT srs_id = 4327
                            GROUP BY srs_id
                    """)
            row = c.fetchall()
            bboxes = []

            if not row:
                raise LookupError("No valid data detected (EPSG:4327 not supported)")

            for line in row:
                bboxes.append([line[0], line[1], line[2], line[3], line[4]])
            
            wgs84bboxen = []
            for bbox in bboxes:
                box = CRSTransform(bbox[0], bbox[1], bbox[4])
                box.extend(CRSTransform(bbox[2], bbox[3], bbox[4]))
                wgs84bboxen.append(box)

            bbox = [wgs84bboxen[0][0], wgs84bboxen[0][1], wgs84bboxen[0][2], wgs84bboxen[0][3]]
            for wgs84Box in wgs84bboxen:
                if wgs84Box[0] < bbox[0]:
                    bbox[0] = wgs84Box[0]
                if wgs84Box[1] < bbox[1]:
                    bbox[1] = wgs84Box[1]
                if wgs84Box[2] > bbox[2]:
                    bbox[2] = wgs84Box[2]
                if wgs84Box[3] > bbox[3]:
                    bbox[3] = wgs84Box[3]
            return(bbox, None)
        except LookupError as e:
            return(None, e)
        except:
            return (None, "File Error!")
        finally:
            try:
                conn.close()
            except:
                pass


    # csv or csv-formatted text file handling (csv on the web)
    elif file_extension == ".csv" or file_extension == ".txt":
        # @see https://stackoverflow.com/questions/16503560/read-specific-columns-from-a-csv-file-with-csv-module
        try: # finding the correct columns for latitude and longitude
            csvfile = open(filepath)
            head = csv.reader(csvfile, delimiter=' ', quotechar='|')
            # get the header line and convert, if possible, ';' to ','
            # and separate each word divided by a ',' into an array
            header = next(head)[0].replace(";", ",").split(",")
            lng=None 
            lat=None
            # searching for valid names for latitude and longitude
            for t in header:
                if t == "longitude":
                    lng = "longitude"
                if t == "latitude":
                    lat = "latitude"
                if t == "lon":
                    lng = "lon"
                if t == "lng":
                    lng = "lng"
                if t == "lat":
                    lat = "lat"

            # if there is no valid name for latitude or longitude, an exception is thrown and caught with an error message
            if(lat == None or lng == None):
                raise ValueError("please rename latitude and longitude: latitude/lat, longitude/lon/lng")
        # errors
        except ValueError as e:
            return (None, e)
        except:
            return (None, "File Error!")
        
        # if no error occurred
        else:
            try:
                df = pd.read_csv(filepath, header=0)
                # get all coordinates from the detected columns
                latitudes = df[lat].tolist()
                longitudes = df[lng].tolist()
                
                # taking the smallest and highest coordinates from the lists
                bbox = [min(longitudes), min(latitudes), max(longitudes), max(latitudes)]
                return (bbox, None)

            # in case the values are separated by a ';' instead of a comma
            except KeyError:
                try:
                    # tell the reader that the separator is a ';'
                    df = pd.read_csv(filepath, header=0, sep=';')
                    # get all coordinates from the detected columns
                    latitudes = df[lat].tolist()
                    longitudes = df[lng].tolist()

                    # build [min_lon, min_lat, max_lon, max_lat] from the extremes
                    bbox = [min(longitudes), min(latitudes), max(longitudes), max(latitudes)]
                    return (bbox, None)
                # the CSV is not valid
                except KeyError:
                    return (None, "Please separate your data with either ',' or ';'!")
            # errors
            except:
                return (None, "File Error: File not found or check if your csv file is valid to 'csv on the web'")

    # GML/XML/KML handling
    elif file_extension == ".gml" or file_extension == ".xml" or file_extension == ".kml":
        try:
            # @see https://gis.stackexchange.com/questions/39080/using-ogr2ogr-to-convert-gml-to-shapefile-in-python
            # convert the gml file to a GeoJSON file
            ogr2ogr.main(["","-f", "GeoJSON", "%s.json" % (name), filepath])
            # srcDS = gdal.OpenEx(filepath)
            # ds = gdal.VectorTranslate('output.json', srcDS, format='GeoJSON')

            # get the bounding box from the generated GeoJSON file
            myGeojson = pygeoj.load(filepath="%s.json" % name)
            click.echo(myGeojson.bbox)
            # the temporary GeoJSON file is removed in the finally block below
            return (myGeojson.bbox, None)
        # errors
        except:
            return (None, "File not found or your GML/XML/KML data is not valid")
        finally:
            try:
                os.remove("%s.json"%name)
            except:
                pass
                
    # if the extension has not been implemented yet or won't be supported
    else:
        return (None, "type %s not yet supported" % file_extension)
Exemple #41
0
import os
import ogr2ogr

gpx_directory_path = r"C:\Users\Fran\Downloads\export_22552451\activities"

gpx_list = os.listdir(gpx_directory_path)

for gpx_file in gpx_list:
    if gpx_file.endswith(".gpx"):
        full_gpx_path = os.path.join(gpx_directory_path, gpx_file)
        print(f"Converting {full_gpx_path} to shapefile")
        output_shapefile = os.path.splitext(full_gpx_path)[0] + ".shp"
        try:
            ogr2ogr.main(
                ["", "-f", "ESRI Shapefile", output_shapefile, full_gpx_path])
            ## download ogr2ogr.py from here https://svn.osgeo.org/gdal/trunk/gdal/swig/python/samples/ogr2ogr.py
            print(f"Finished Converting {full_gpx_path} to shapefile \n")
        except Exception as e:
            print("failed to convert")
            print(e)

print("Finished converting all gpx to shp")
def StringToRaster(raster):
    # Load the raster layer from the provided path string

    fileInfo = QFileInfo(raster)
    path = fileInfo.filePath()
    baseName = fileInfo.baseName()

    layer = QgsRasterLayer(path, baseName)
    #QgsMapLayerRegistry.instance().addMapLayer(layer)
    
    entries = []
    # Define band1
    boh1 = QgsRasterCalculatorEntry()
    boh1.ref = 'ndvi20160607sentinel@5'
    boh1.raster = layer
    boh1.bandNumber = 5
    entries.append( boh1 )
    	
    # Process calculation with input extent and resolution
    calc = QgsRasterCalculator( '(ndvi20160607sentinel@5) * 166.67 + 111.67', 'C:/Hackathon Farmhack data/Output/outputfile.tif', 'GTiff', layer.extent(), layer.width(), layer.height(), entries )
    calc.processCalculation()
    
    fileInfo = QFileInfo('C:/Hackathon Farmhack data/Output/outputfile.tif')
    path = fileInfo.filePath()
    baseName = fileInfo.baseName()
    
    layer = QgsRasterLayer(path, baseName)
    #QgsMapLayerRegistry.instance().addMapLayer(layer)
    
    if layer.isValid():
        print "Layer was loaded successfully!"
    
    else:
        print "Unable to read basename and file path - Your string is probably invalid"
    
    shape = QgsVectorLayer('C:/Hackathon Farmhack data/perceel-hier-rdnew.geojson', 'perceel', 'ogr')
    #QgsMapLayerRegistry.instance().addMapLayer(shape)
    xmin = (shape.extent().xMinimum()) #extract the minimum x coord from our layer
    xmax = (shape.extent().xMaximum()) #extract our maximum x coord from our layer
    ymin = (shape.extent().yMinimum()) #extract our minimum y coord from our layer
    ymax = (shape.extent().yMaximum()) #extract our maximum y coord from our layer
    #prepare the extent in a format the VectorGrid tool can interpret (xmin,xmax,ymin,ymax)
    extent = str(xmin)+ ',' + str(xmax)+ ',' +str(ymin)+ ',' +str(ymax)  
    
    # raster the given shape
    processing.runalg('qgis:vectorgrid', extent, 20, 20, 0, 'C:/Hackathon Farmhack data/Output/rasterShapes.geojson')
    
    shapeRaster = QgsVectorLayer('C:/Hackathon Farmhack data/Output/rasterShapes.geojson', 'perceelRaster', 'ogr')
    shapeRaster.setCrs(QgsCoordinateReferenceSystem(28992,  QgsCoordinateReferenceSystem.EpsgCrsId))
    #QgsMapLayerRegistry.instance().addMapLayer(shapeRaster)
    
    #clip the raster returned
    processing.runalg('qgis:clip', shapeRaster, shape, 'C:/Hackathon Farmhack data/Output/clippedRaster.shp')

    #define oldPath and newPath
    ogr2ogr.main(["","-f", "ESRI Shapefile", "-s_srs", "epsg:28992", "-t_srs", "epsg:32632", "C:/Hackathon Farmhack data/Output/clippedRasterNew.shp", "C:/Hackathon Farmhack data/Output/clippedRaster.shp"])
    
    clippedRaster = QgsVectorLayer('C:/Hackathon Farmhack data/Output/clippedRasterNew.shp', 'clippedRaster', 'ogr')
    clippedRaster.setCrs(QgsCoordinateReferenceSystem(32632,  QgsCoordinateReferenceSystem.EpsgCrsId))
    #QgsMapLayerRegistry.instance().addMapLayer(clippedRaster)
    
    #zonalstatistics
    processing.runalg('qgis:zonalstatistics', layer, 1, clippedRaster, '', False, 'C:/Hackathon Farmhack data/Output/filledRaster.geojson')
    filledRaster = QgsVectorLayer('C:/Hackathon Farmhack data/Output/filledRaster.geojson', 'filledRaster', 'ogr')
    filledRaster.setCrs(QgsCoordinateReferenceSystem(32632,  QgsCoordinateReferenceSystem.EpsgCrsId))
    #QgsMapLayerRegistry.instance().addMapLayer(filledRaster)    
	
    ogr2ogr.main(["","-f", "GeoJSON", "-s_srs", "epsg:32632", "-t_srs", "epsg:4326", "C:/Hackathon Farmhack data/Output/taakkaart.geojson", "C:/Hackathon Farmhack data/Output/filledRaster.geojson"])
    taakkaart = QgsVectorLayer('C:/Hackathon Farmhack data/Output/taakkaart.geojson', 'taakkaart', 'ogr')
    QgsMapLayerRegistry.instance().addMapLayer(taakkaart)    
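
A usage sketch for StringToRaster above, run from the QGIS Python console; the input path is hypothetical and only inferred from the band reference 'ndvi20160607sentinel@5':

StringToRaster('C:/Hackathon Farmhack data/ndvi20160607sentinel.tif')
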
Exemple #43
0
def main(argv):
   #Get input and output from command line args
   parser = OptionParser()
   parser.add_option("-i", "--input", dest="inputfile",
                     help="Input csv file path", metavar="FILE")
   parser.add_option("-o", "--output", dest="outfile", default="./tmp/mbtiles.mbtiles",
                     help="Output MBtiles file path. Default is ./tmp/mbtiles.mbtiles", metavar="FILE")
   parser.add_option("-a", "--alg", dest="alg", default="invdist:power=2.0:smoothing=1.0",
                     help="GDAL grid algorithm. Default is 'invdist:power=2.0:smoothing=1.0'")
   parser.add_option("-m", "--zoom", dest="zoom", default="1-3",
                     help="Zoom level in single quotes. Default is '1-3'")
   parser.add_option("-c", "--color1", dest="color1", default='255 255 0',
                     help="RGB color for lowest level, Default '255 255 0' for yellow")
   parser.add_option("-d", "--color2", dest="color2", default='255 0 0',
                     help="RGB color for highest level, Default is '255 0 0' for red")
   parser.add_option("-n", "--nearest", dest="nearest", default=False,
                     help="If true, raster values will be assigned to nearest step, rather than continuous. Default is continuous. To be used in conjunction with -s")
   parser.add_option("-s", "--steps", dest="steps", default=10,
                     help="Number of steps in the color relief if specified and -n is 'True'. Default is 10")
   parser.add_option("-r", "--rows", dest="rows", default=1000,
                     help="Grid rows. Default is 1000")
   parser.add_option("-l", "--cols", dest="cols", default=1000,
                     help="Grid columns. Default is 1000")
   parser.add_option("-x", "--longitude", dest="longitude", default='longitude',
                     help="CSV longitude header. Default is 'longitude'")
   parser.add_option("-y", "--latitude", dest="latitude", default='latitude',
                     help="CSV latitude header. Default is 'latitude'")
   parser.add_option("-z", "--zfield", dest="zfield",
                     help="CSV z-field header")
   parser.add_option("-p", "--clipshape", dest="clipshape", default="./tmp/convexhull.shp",
                     help="Shapefile to clip tif. Default is generated convex-hull")
   (options, args) = parser.parse_args()
   basename = os.path.basename(options.inputfile)
   inputname, inputextension = os.path.splitext(basename)
   #Clean up
   try:
      shutil.rmtree("./tmp")
   except:
      print "No cleanup required... Continuing..."
   #Write DBF
   os.makedirs("./tmp")
   ogr2ogr.main(["","-f","ESRI Shapefile","./tmp",options.inputfile])
   #Write VRT
   print "Writing CSV VRT..."
   vrt = open('./tmp/'+inputname+'.vrt','w')
   vrt.write("<OGRVRTDataSource>\n")
   vrt.write("\t<OGRVRTLayer name='"+inputname+"'>\n")
   vrt.write("\t\t<SrcDataSource relativeToVRT='1'>./</SrcDataSource>\n")
   vrt.write("\t\t<GeometryType>wkbPoint</GeometryType>\n")
   vrt.write("\t\t<LayerSRS>WGS84</LayerSRS>\n")
   vrt.write("\t\t<GeometryField encoding='PointFromColumns' x='"+options.longitude+"' y='"+options.latitude+"'/>\n")
   vrt.write("\t</OGRVRTLayer>\n")
   vrt.write("</OGRVRTDataSource>")
   vrt.close()
   #Write SHP
   print "Converting to SHP..."
   ogr2ogr.main(["","-f","ESRI Shapefile","./tmp","./tmp/"+inputname+".vrt","-overwrite"])
   
   #Rasterize SHP
   print "Rasterizing..."
   rasterize = subprocess.Popen(["gdal_grid","-outsize",str(options.rows),str(options.cols),"-a",options.alg,"-zfield",options.zfield,"./tmp/"+inputname+".shp","-l",inputname,"./tmp/"+inputname+".tif","--config", "GDAL_NUM_THREADS", "ALL_CPUS"], stdout=subprocess.PIPE,stderr=subprocess.PIPE)
   rOutput = rasterize.communicate()[0]
   print rOutput
   
   #Convex hull
   # Get a Layer
   print "Calculating convex hull..."
   inShapefile = "./tmp/"+inputname+".shp"
   inDriver = ogr.GetDriverByName("ESRI Shapefile")
   inDataSource = inDriver.Open(inShapefile, 0)
   inLayer = inDataSource.GetLayer()
   
   # Collect all Geometry
   geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
   for feature in inLayer:
       geomcol.AddGeometry(feature.GetGeometryRef())
   
   # Calculate convex hull
   convexhull = geomcol.ConvexHull()
   
   # Save extent to a new Shapefile
   outShapefile = "./tmp/convexhull.shp"
   outDriver = ogr.GetDriverByName("ESRI Shapefile")
   
   # Remove output shapefile if it already exists
   if os.path.exists(outShapefile):
       outDriver.DeleteDataSource(outShapefile)
   
   # Create the output shapefile
   outDataSource = outDriver.CreateDataSource(outShapefile)
   outLayer = outDataSource.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)
   
   # Add an ID field
   idField = ogr.FieldDefn("id", ogr.OFTInteger)
   outLayer.CreateField(idField)
   
   # Create the feature and set values
   featureDefn = outLayer.GetLayerDefn()
   feature = ogr.Feature(featureDefn)
   feature.SetGeometry(convexhull)
   feature.SetField("id", 1)
   outLayer.CreateFeature(feature)
   
   # Close DataSource
   inDataSource.Destroy()
   outDataSource.Destroy()
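   # Note (assumption about newer GDAL bindings): the more common idiom today is
   # to release the datasources by dereferencing them (inDataSource = None);
   # Destroy() still works here but is considered legacy.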
   
   #Write color relief txt
   print "Writing color relief txt..."
   steps = int(options.steps)
   colorTxt = open("./tmp/"+"color.txt","w")
   colorTxt.write("0% "+options.color1+"\n")
   percentStep = 100/float(steps)
   for step in range(1,steps):
      percentR = str(((int(options.color1.split()[0])*(steps-step))+(int(options.color2.split()[0])*step))/steps)
      percentG = str(((int(options.color1.split()[1])*(steps-step))+(int(options.color2.split()[1])*step))/steps)
      percentB = str(((int(options.color1.split()[2])*(steps-step))+(int(options.color2.split()[2])*step))/steps)
      colorTxt.write(str(percentStep*step)+"% "+percentR+" "+percentG+" "+percentB+" "+"\n")
   colorTxt.write("100% "+options.color2)
   colorTxt.close()
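   # With the defaults (-c '255 255 0', -d '255 0 0', -s 10) the generated
   # color.txt looks roughly like this (green values truncated by integer division):
   #   0% 255 255 0
   #   10.0% 255 229 0
   #   50.0% 255 127 0
   #   100% 255 0 0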
   
   #Color the raster
   print "Colorizing raster..."
   if options.nearest:
      colorize = subprocess.Popen(["gdaldem", "color-relief","./tmp/"+inputname+".tif", "./tmp/color.txt", "./tmp/"+inputname+"_color.tif","-nearest_color_entry"], stdout=subprocess.PIPE,stderr=subprocess.PIPE)
   else:
      colorize = subprocess.Popen(["gdaldem", "color-relief","./tmp/"+inputname+".tif", "./tmp/color.txt", "./tmp/"+inputname+"_color.tif"], stdout=subprocess.PIPE,stderr=subprocess.PIPE)
   cOutput = colorize.communicate()[0]
   print cOutput
   
   #Warp for compression and clip to convex hull
   print "Warping raster..."
   warp = subprocess.Popen(["gdalwarp","-co","compress=deflate", "-co", "tiled=yes", "-r", "lanczos", "-cutline", options.clipshape, "-dstnodata", "0", "./tmp/"+inputname+"_color.tif", "./tmp/"+inputname+"_final.tif"], stdout=subprocess.PIPE,stderr=subprocess.PIPE)
   wOutput = warp.communicate()[0]
   print wOutput
   
   #Draw VRT for parallel gdal2tiles
   print "Building tile VRT..."
   buildVrt = subprocess.Popen(["gdalbuildvrt","./tmp/tiles.vrt", "./tmp/"+inputname+"_final.tif"], stdout=subprocess.PIPE,stderr=subprocess.PIPE)
   vOutput = buildVrt.communicate()[0]
   print vOutput
   
   #Draw png tiles
   print "Drawing tiles..."
   argv = gdal.GeneralCmdLineProcessor( ['./gdal2tiles.py','-z',options.zoom,'./tmp/tiles.vrt','./tmp/tiles'] )
   if argv:
      c1 = Configuration(argv[1:])
      tile=c1.create_tile()
      gdal2tiles.process(c1,tile)
       
   #Create MBtiles
   print "Generating MBtiles file..."
   mbtiles = subprocess.Popen(["mb-util","./tmp/tiles",options.outfile,"--scheme","tms"], stdout=subprocess.PIPE,stderr=subprocess.PIPE)
   mOutput = mbtiles.communicate()[0]
   print mOutput
   print "Done."
def convertSHPtoKML(filename):
    output_filename = str(filename)+"KML"
    input_filename = filename
    ogr2ogr.main(["", "-f", "KML", str(output_filename)+".kml", str(input_filename)+".shp"])
    return True
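
A usage sketch for convertSHPtoKML (hypothetical filename): the argument is the path without the '.shp' extension, and the output is written next to it as '<filename>KML.kml'.

convertSHPtoKML("tracks")  # reads tracks.shp and writes tracksKML.kml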