Code example #1
def crop_by_vector_by_vector(crop_shp_path, input_path, output_name=None):
    # ogr2ogr -skipfailures -clipsrc afg.shp output.shp G2014_2013_1_mid.shp
    output_path = None
    if output_name is None:
        output_path = create_tmp_filename('shp', 'tmp_shp', 'tmp_shp_' + str(uuid.uuid4()), False)
    else:
        # TODO: derive the output folder from the output filename and
        # create it if it does not already exist
        log.warn("TODO: derive the output folder from the output filename and create it if it does not already exist.")
        output_path = create_tmp_filename('shp', output_name, 'tmp_shp_' + str(uuid.uuid4()), False)
    args = [
        'ogr2ogr',
        '-skipfailures',
        # TODO: optional overwrite
        '-overwrite',
        '-clipsrc',
        crop_shp_path,
        output_path,
        input_path,
    ]
    try:
        cmd = " ".join(args)
        log.info(cmd)
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
        p.wait()
    except Exception as e:
        log.error(e)
        raise Exception(e)
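
A minimal, self-contained sketch of the same ogr2ogr clip with the arguments passed as a list instead of a joined shell command (the shapefile paths are hypothetical, taken from the comment at the top of the function). Passing the list directly avoids shell quoting issues and makes a non-zero exit code raise immediately:

import subprocess

def clip_shapefile(clip_src_path, input_path, output_path):
    # equivalent to: ogr2ogr -skipfailures -overwrite -clipsrc <clip> <output> <input>
    args = ['ogr2ogr', '-skipfailures', '-overwrite', '-clipsrc',
            clip_src_path, output_path, input_path]
    subprocess.check_call(args)  # raises CalledProcessError on failure

# clip_shapefile('afg.shp', 'G2014_2013_1_mid.shp', 'output.shp')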
Code example #2
def crop_by_vector_from_db(input_file, db_connection_string, query, srcnodata='nodata', dstnodata='nodata'):
    log.info(query)
    output_file_gdal_warp = create_tmp_filename('.tif', 'gdal_warp')
    output_file = create_tmp_filename('.tif', 'output')
    log.info(input_file)

    # crop the layer on cutline
    args = [
        "gdalwarp",
        "-q",
        "-multi",
        "-of", "GTiff",
        "-cutline", db_connection_string,
        "-csql", query,
        "-srcnodata", str(srcnodata),
        "-dstnodata", str(dstnodata),
        # -crop_to_cutline is needed otherwise the layer is not cropped
        # TODO: resolve shifting problem
        # "-crop_to_cutline",
        # "-dstalpha",
        input_file,
        output_file_gdal_warp
    ]
    try:
        log.info(args)
        # TODO: handle subprocess errors explicitly (stderr is not captured here)
        output = subprocess.check_output(args)
        # stdout_value, error = proc.communicate()
        log.info(output)
    except Exception as e:
        raise Exception(e)
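
A hedged usage sketch: gdalwarp reads the cutline from PostGIS, so db_connection_string is an OGR "PG:" datasource and query selects the clipping geometry. The connection string and query below are hypothetical, modelled on the create_shp_from_postgis example further down:

conn = 'PG:host=localhost user=fenix dbname=fenix password=Qwaszx'
sql = "SELECT geom FROM spatial.gaul0_2015_4326 WHERE adm0_code = '1'"
crop_by_vector_from_db('input.tif', conn, sql, srcnodata=-9999, dstnodata=-9999)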
Code example #3
def _merge_layers(rows,
                  cols,
                  geotransform,
                  spatialreference,
                  array1,
                  array2,
                  min1,
                  max1,
                  min2,
                  max2,
                  nodata1=None,
                  nodata2=None):
    path = create_tmp_filename('', ".tif")

    # find the indexes of the arrays
    index1 = (array1 > min1) & (array1 <= max1) & (array1 != nodata1)
    index2 = (array2 > min2) & (array2 <= max2) & (array2 != nodata2)

    # merge array indexes
    compound_index = index1 & index2
    del index1, index2

    # create a new raster
    output_raster = gdal.GetDriverByName('GTiff').Create(
        path, rows, cols, 1, gdal.GDT_Int16)  # create the output GeoTIFF
    output_raster.SetGeoTransform(geotransform)
    srs = SpatialReference(wkt=spatialreference)
    output_raster.SetProjection(srs.ExportToWkt())
    # create raster from the compound_index of the two rasters
    # TODO: the reshape slows the operation down; use matrices
    output_raster.GetRasterBand(1).WriteArray(
        compound_index.reshape(cols, rows))
    return path
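
A hedged sketch of preparing the inputs from two aligned rasters with GDAL ('a.tif', 'b.tif' and the value ranges are hypothetical). Note that in the Create call above the rows argument is used as the raster x-size and cols as the y-size, so the sketch passes them in that order:

from osgeo import gdal

ds1 = gdal.Open('a.tif')
ds2 = gdal.Open('b.tif')
band1, band2 = ds1.GetRasterBand(1), ds2.GetRasterBand(1)
merged_path = _merge_layers(ds1.RasterXSize, ds1.RasterYSize,
                            ds1.GetGeoTransform(), ds1.GetProjection(),
                            band1.ReadAsArray(), band2.ReadAsArray(),
                            0, 100, 0, 100,
                            nodata1=band1.GetNoDataValue(),
                            nodata2=band2.GetNoDataValue())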
Code example #4
def crop_raster_with_bounding_box(input_file,
                                  minlon,
                                  minlat,
                                  maxlon,
                                  maxlat,
                                  srcnodata=None):
    if srcnodata is None:
        srcnodata = get_nodata_value(input_file)

    log.info("crop_raster_with_bounding_box")
    output_file = create_tmp_filename('.tif', 'gdal_translate_by_bbox')
    args = [
        "gdal_translate",
        "-a_nodata", str(srcnodata),
        "-projwin",
        str(minlat),
        str(minlon),
        str(maxlat),
        str(maxlon),
        input_file,
        output_file
    ]
    try:
        log.info(args)
        proc = subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
        # proc = subprocess.check_output(args)
    except Exception as e:
        raise Exception(e)
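
For reference, gdal_translate's -projwin option expects the window as ulx uly lrx lry (west, north, east, south) in the raster's georeferenced units, and the four coordinates above are forwarded to it in the order given. A self-contained sketch of the underlying command, with hypothetical WGS84 coordinates and paths:

import subprocess

subprocess.check_call([
    'gdal_translate',
    '-a_nodata', '-9999',
    '-projwin', '60.5', '38.5', '75.2', '29.3',  # ulx uly lrx lry
    'input.tif', 'clipped.tif',
])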
Code example #5
def create_shp_from_postgis(db_connection_string, query, output_path=None):
    '''
    Create a shapefile from a PostGIS query.
    :param db_connection_string: PostGIS connection string (e.g. PG:"host=... user=... dbname=... password=...")
    :param query: SQL query selecting the features to export
    :param output_path: optional output path; a temporary path is generated when None
    :return:
    '''
    # ogr2ogr -f "ESRI Shapefile" /home/vortex/Desktop/export/afg.shp PG:"host=localhost user=fenix dbname=fenix password=Qwaszx" -sql "select * from spatial.gaul0_2015_4326 where adm0_code IN ('1')"
    # ogr2ogr -f "ESRI Shapefile" output.shp PG:"" -sql "query"
    if output_path is None:
        output_path = create_tmp_filename('shp', 'tmp_shp_postgis', 'tmp_shp_postgis_' + str(uuid.uuid4()), False)
    else:
        # TODO: derive the output folder from the output filename and
        # create it if it does not already exist
        log.warn("TODO: derive the output folder from the output filename and create it if it does not already exist.")
    args = [
        'ogr2ogr',
        # TODO: optional overwrite
        '-overwrite',
        '-f',
        '"ESRI Shapefile"',
        output_path,
        db_connection_string,
        "-sql",
        "\"" + query + "\""
    ]
    try:
        cmd = " ".join(args)
        log.info(cmd)
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
        p.wait()
    except Exception as e:
        log.error(e)
        raise Exception(e)
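
A hedged usage sketch, reusing the hypothetical connection string and query from the comment at the top of the function; the connection string must carry its own quoting because the command is joined and run through the shell:

conn = 'PG:"host=localhost user=fenix dbname=fenix password=Qwaszx"'
sql = "select * from spatial.gaul0_2015_4326 where adm0_code IN ('1')"
create_shp_from_postgis(conn, sql)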
Code example #6
def crop_by_vector_from_db(input_file,
                           db_connection_string,
                           query,
                           srcnodata='nodata',
                           dstnodata='nodata'):
    log.info(query)
    output_file_gdal_warp = create_tmp_filename('.tif', 'gdal_warp')
    output_file = create_tmp_filename('.tif', 'output')
    log.info(input_file)

    # crop the layer on cutline
    args = [
        "gdalwarp",
        "-q",
        "-multi",
        "-of",
        "GTiff",
        "-cutline",
        db_connection_string,
        "-csql",
        query,
        "-srcnodata",
        str(srcnodata),
        "-dstnodata",
        str(dstnodata),
        # -crop_to_cutline is needed otherwise the layer is not cropped
        # TODO: resolve shifting problem
        # "-crop_to_cutline",
        # "-dstalpha",
        input_file,
        output_file_gdal_warp
    ]
    try:
        log.info(args)
        # TODO: handle subprocess errors explicitly (stderr is not captured here)
        output = subprocess.check_output(args)
        # stdout_value, error = proc.communicate()
        log.info(output)
    except Exception as e:
        raise Exception(e)
Code example #7
def _merge_layers(rows, cols, geotransform, spatialreference, array1, array2, min1, max1, min2, max2, nodata1=None, nodata2=None):
    path = create_tmp_filename('', ".tif")

    # find the indexes of the arrays
    index1 = (array1 > min1) & (array1 <= max1) & (array1 != nodata1)
    index2 = (array2 > min2) & (array2 <= max2) & (array2 != nodata2)

    # merge array indexes
    compound_index = index1 & index2
    del index1, index2

    # create a new raster
    output_raster = gdal.GetDriverByName('GTiff').Create(path, rows, cols, 1, gdal.GDT_Int16)  # create the output GeoTIFF
    output_raster.SetGeoTransform(geotransform)
    srs = SpatialReference(wkt=spatialreference)
    output_raster.SetProjection(srs.ExportToWkt())
    # create raster from the compound_index of the two rasters
    # TODO: the reshape slows the operation down; use matrices
    output_raster.GetRasterBand(1).WriteArray(compound_index.reshape(cols, rows))
    return path
Code example #8
def crop_vector_with_bounding_box(input_file_bbox, file_to_crop, output_path=None):
    # ogr2ogr -f "ESRI Shapefile" output.shp input.shp -clipsrc <x_min> <y_min> <x_max> <y_max>
    with fiona.open(input_file_bbox) as c:
        with fiona.open(file_to_crop) as d:
            if output_path is None:
                output_path = create_tmp_filename('shp', 'tmp_shp_bbox', 'tmp_shp_bbox_' + str(uuid.uuid4()), False)
                bounds = c.bounds
                # s_srs = c.crs['init']
                # t_srs = d.crs['init']
                # check bounds
                if bounds[0] == 0.0 and bounds[1] == 0.0 and bounds[2] == 0.0 and bounds[3] == 0.0:
                    msg = "Shapefile " + input_file_bbox + " has 0 invalide size"
                    log.error(msg)
                    raise Exception(msg)
                # TODO: reproject the bounds to the target coordinate system if needed (i.e. s_srs != t_srs)

                args = [
                    'ogr2ogr',
                    # TODO: optional overwrite
                    '-overwrite',
                    '-f',
                    '"ESRI Shapefile"',
                    output_path,
                    file_to_crop,
                    '-clipsrc',
                    str(bounds[0]),
                    str(bounds[1]),
                    str(bounds[2]),
                    str(bounds[3]),
                ]
                try:
                    cmd = " ".join(args)
                    log.info(cmd)
                    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
                    p.wait()
                except Exception as e:
                    log.error(e)
                    raise Exception(e)
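
A hedged, self-contained sketch of the fiona call used above to read the clipping layer's extent ('afg.shp' is a hypothetical path); bounds come back as (minx, miny, maxx, maxy), the order -clipsrc expects:

import fiona

with fiona.open('afg.shp') as src:
    minx, miny, maxx, maxy = src.bounds
    print(minx, miny, maxx, maxy)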
Code example #9
def crop_raster_with_bounding_box(input_file, minlon, minlat, maxlon, maxlat, srcnodata=None):
    if srcnodata is None:
        srcnodata = get_nodata_value(input_file)

    log.info("crop_raster_with_bounding_box")
    output_file = create_tmp_filename('.tif', 'gdal_translate_by_bbox')
    args = [
        "gdal_translate",
        "-a_nodata", str(srcnodata),
        "-projwin",
        str(minlat),
        str(minlon),
        str(maxlat),
        str(maxlon),
        input_file,
        output_file
    ]
    try:
        log.info(args)
        proc = subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
        # proc = subprocess.check_output(args)
    except Exception as e:
        raise Exception(e)
Code example #10
    def export_vector_by_spatial_query(self, user_json, distribution_url=None, distribution_folder=None):

        vector_filter = user_json["extract_by"]
        db_options = vector_filter["options"]
        db_datasource = db_options["db"]
        layer_code = db_options["layer"]
        column_code = db_options["column"]
        codes = db_options["codes"]
        email_address = None if "email_address" not in user_json else user_json["email_address"]
        vectors = user_json["vector"]


        # getting distribution folder
        distribution_folder = self._get_distribution_folder(distribution_folder)

        # TODO remove dependency from here?
        sq = SpatialQuery(self.config)

        # get file to extract
        output_dirs = []
        for vector in vectors:
            vector_path = get_vector_path(vector)
            srid = get_srid_vector(vector_path)
            #srid = "'merc'"
            log.info(srid)

            # get query
            query = sq.get_query_string_select_all(db_datasource, layer_code, column_code, codes, "ST_Transform(geom, " + srid + ")")
            log.info(query)

            db = sq.get_db_instance()
            db_connection_string = db.get_connection_string(True)
            output_name = get_filename(vector_path)
            output_file_path = crop_vector_on_vector_bbox_and_postgis(vector_path, '"' + db_connection_string + '"', query, output_name)

            # check if filepath exists
            if output_file_path:
                output_dirs.append(os.path.dirname(output_file_path))

        # create a random tmp folder
        zip_folder_id = str(uuid.uuid4())
        zip_folder = os.path.join(distribution_folder, zip_folder_id)
        os.mkdir(zip_folder)

        # create a valid folder name to zip it
        output_folder = os.path.join(zip_folder, "layers")
        os.mkdir(output_folder)

        # move output dirs to distribution folder
        for output_dir in output_dirs:
            for file in glob.glob(os.path.join(output_dir, "*")):
                move(file, output_folder)

        # zip the folder
        tmp_file = create_tmp_filename()
        tmp_zip_path = make_archive(zip_folder, tmp_file)
        # TODO: workaround; strangely, the zip file cannot be created directly inside the folder being zipped
        zip_path = os.path.join(zip_folder, "layers.zip")
        os.rename(tmp_zip_path, zip_path)
        log.info(zip_path)

        # URL to the resource
        if distribution_url is None:
            return zip_path
        else:
            url = distribution_url + zip_folder_id

            # send email if email address
            self._send_email(url, email_address)

            return '{ "url" : "' + url + '"}'
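
A hedged sketch of the user_json payload this method expects; only the keys read above are shown and every value is hypothetical:

user_json = {
    "extract_by": {
        "options": {
            "db": "spatial",
            "layer": "gaul0_2015_4326",
            "column": "adm0_code",
            "codes": ["1"]
        }
    },
    "email_address": "user@example.org",  # optional
    "vector": ["afg_rainfall_2015"]
}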
Code example #11
    def export_vector_by_spatial_query(self,
                                       user_json,
                                       distribution_url=None,
                                       distribution_folder=None):

        vector_filter = user_json["extract_by"]
        db_options = vector_filter["options"]
        db_datasource = db_options["db"]
        layer_code = db_options["layer"]
        column_code = db_options["column"]
        codes = db_options["codes"]
        email_address = None if "email_address" not in user_json else user_json[
            "email_address"]
        vectors = user_json["vector"]

        # getting distribution folder
        distribution_folder = self._get_distribution_folder(
            distribution_folder)

        # TODO remove dependency from here?
        sq = SpatialQuery(self.config)

        # get file to extract
        output_dirs = []
        for vector in vectors:
            vector_path = get_vector_path(vector)
            srid = get_srid_vector(vector_path)
            #srid = "'merc'"
            log.info(srid)

            # get query
            query = sq.get_query_string_select_all(
                db_datasource, layer_code, column_code, codes,
                "ST_Transform(geom, " + srid + ")")
            log.info(query)

            db = sq.get_db_instance()
            db_connection_string = db.get_connection_string(True)
            output_name = get_filename(vector_path)
            output_file_path = crop_vector_on_vector_bbox_and_postgis(
                vector_path, '"' + db_connection_string + '"', query,
                output_name)

            # check if filepath exists
            if output_file_path:
                output_dirs.append(os.path.dirname(output_file_path))

        # create a random tmp folder
        zip_folder_id = str(uuid.uuid4())
        zip_folder = os.path.join(distribution_folder, zip_folder_id)
        os.mkdir(zip_folder)

        # create a valid folder name to zip it
        output_folder = os.path.join(zip_folder, "layers")
        os.mkdir(output_folder)

        # move output dirs to distribution folder
        for output_dir in output_dirs:
            for file in glob.glob(os.path.join(output_dir, "*")):
                move(file, output_folder)

        # zip the folder
        tmp_file = create_tmp_filename()
        tmp_zip_path = make_archive(zip_folder, tmp_file)
        # TODO: workaround; strangely, the zip file cannot be created directly inside the folder being zipped
        zip_path = os.path.join(zip_folder, "layers.zip")
        os.rename(tmp_zip_path, zip_path)
        log.info(zip_path)

        # URL to the resource
        if distribution_url is None:
            return zip_path
        else:
            url = distribution_url + zip_folder_id

            # send email if email address
            self._send_email(url, email_address)

            return '{ "url" : "' + url + '"}'
Code example #12
def _crop_by_vector_database(input_file, query, db_connection_string, minlat, minlon, maxlat, maxlon, srcnodata='nodata', dstnodata='nodata'):
    log.info(query)
    output_file_gdal_translate = create_tmp_filename('.tif', 'gdal_translate')
    output_file_gdal_warp = create_tmp_filename('.tif', 'gdal_warp')
    output_file = create_tmp_filename('.tif', 'output')
    args = [
        'gdal_translate',
        '-projwin',
        str(minlat),
        str(minlon),
        str(maxlat),
        str(maxlon),
        input_file,
        output_file_gdal_translate
    ]
    try:
        log.info(args)
        # TODO: handle non-zero exit codes (subprocess.call does not raise on them)
        subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
    except Exception as e:
        raise Exception(e)

    args = [
        'gdalwarp',
        "-q",
        "-multi",
        "-of", "GTiff",
        "-cutline", db_connection_string,
        "-csql", query,
        "-srcnodata", str(srcnodata),
        "-dstnodata", str(dstnodata),
        # -crop_to_cutline is needed otherwise the layer is not cropped
        # TODO: resolve shifting problem
        # "-crop_to_cutline",
        # "-dstalpha",
        output_file_gdal_translate,
        output_file_gdal_warp
    ]
    try:
        # log.info(args)
        # TODO: handle non-zero exit codes (subprocess.call does not raise on them)
        subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
    except Exception as e:
        raise Exception(e)

    # TODO: is this third operation actually needed?
    args = [
        'gdal_translate',
        "-co", "COMPRESS=DEFLATE",
        "-a_nodata", str(dstnodata),
        output_file_gdal_warp,
        output_file
    ]
    try:
        log.info(args)
        # TODO: handle non-zero exit codes (subprocess.call does not raise on them)
        subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
    except Exception as e:
        raise Exception(e)

    os.remove(output_file_gdal_warp)
    os.remove(output_file_gdal_translate)

    if os.path.isfile(output_file):
        return output_file
    return None
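
A hedged usage sketch with hypothetical values; the function returns the path of the cropped, DEFLATE-compressed GeoTIFF, or None if the output was not produced. Note that the four coordinates are forwarded unchanged to gdal_translate's -projwin, which reads them as ulx uly lrx lry:

cropped = _crop_by_vector_database(
    'input.tif',
    "SELECT geom FROM spatial.gaul0_2015_4326 WHERE adm0_code = '1'",
    'PG:host=localhost user=fenix dbname=fenix password=Qwaszx',
    60.5, 38.5, 75.2, 29.3)  # forwarded to -projwin as ulx uly lrx lry
if cropped:
    print(cropped)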
Code example #13
def _crop_by_vector_database(input_file,
                             query,
                             db_connection_string,
                             minlat,
                             minlon,
                             maxlat,
                             maxlon,
                             srcnodata='nodata',
                             dstnodata='nodata'):
    log.info(query)
    output_file_gdal_translate = create_tmp_filename('.tif', 'gdal_translate')
    output_file_gdal_warp = create_tmp_filename('.tif', 'gdal_warp')
    output_file = create_tmp_filename('.tif', 'output')
    args = [
        'gdal_translate', '-projwin',
        str(minlat),
        str(minlon),
        str(maxlat),
        str(maxlon), input_file, output_file_gdal_translate
    ]
    try:
        log.info(args)
        # TODO: handle non-zero exit codes (subprocess.call does not raise on them)
        subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
    except Exception as e:
        raise Exception(e)

    args = [
        'gdalwarp',
        "-q",
        "-multi",
        "-of",
        "GTiff",
        "-cutline",
        db_connection_string,
        "-csql",
        query,
        "-srcnodata",
        str(srcnodata),
        "-dstnodata",
        str(dstnodata),
        # -crop_to_cutline is needed otherwise the layer is not cropped
        # TODO: resolve shifting problem
        # "-crop_to_cutline",
        # "-dstalpha",
        output_file_gdal_translate,
        output_file_gdal_warp
    ]
    try:
        # log.info(args)
        # TODO: handle non-zero exit codes (subprocess.call does not raise on them)
        subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
    except Exception as e:
        raise Exception(e)

    # TODO: is this third operation actually needed?
    args = [
        'gdal_translate', "-co", "COMPRESS=DEFLATE", "-a_nodata",
        str(dstnodata), output_file_gdal_warp, output_file
    ]
    try:
        log.info(args)
        # TODO: handle non-zero exit codes (subprocess.call does not raise on them)
        subprocess.call(args, stdout=subprocess.PIPE, stderr=None)
    except Exception as e:
        raise Exception(e)

    os.remove(output_file_gdal_warp)
    os.remove(output_file_gdal_translate)

    if os.path.isfile(output_file):
        return output_file
    return None