Code example #1
def rename(path, from_basename, to_basename):
    files = glob.glob(path + "/*.tif")

    for f in files:
        if os.path.isfile(f):
            new_filename = get_filename(f).replace(from_basename, to_basename).replace(".", "") + "_3857.tif"
            print "Rename:", get_filename(f), "to:", new_filename
            os.rename(f, path + "/" + new_filename)
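This and the following snippets are Python 2 and rely on standard-library imports plus shared helpers (get_filename, get_file_extension, create_folder) whose definitions are not included; other project-specific helpers (sanitize_name, create_metadata, and so on) are also assumed to exist elsewhere. A minimal sketch of what the common helpers are assumed to look like, inferred only from how they are called here:

import os

def get_filename(path, full=False):
    # Assumed helper: basename of the path without its extension.
    base = os.path.basename(path.rstrip("/"))
    name, _ = os.path.splitext(base)
    if full:
        # Code examples #11/#12 unpack three values: path, filename, name.
        return os.path.dirname(path), base, name
    return name

def get_file_extension(path):
    # Assumed helper: extension without the leading dot.
    return os.path.splitext(path)[1].lstrip(".")

def create_folder(parent, name):
    # Assumed helper: create <parent>/<name> if missing and return its path.
    folder = os.path.join(parent, name)
    if not os.path.isdir(folder):
        os.makedirs(folder)
    return folder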
Code example #2
def rename(path, from_basename, to_basename):
    files = glob.glob(path + "/*.tif")

    for f in files:
        if os.path.isfile(f):
            new_filename = get_filename(f).replace(from_basename, to_basename) + ".tif"
            os.rename(f, path + "/" + new_filename)
Code example #3
def process_metadata(workspace, storage_folder):
    folders = glob.glob(storage_folder+ "*")

    # for each file
    for f in folders:
        print "-------------"
        print "folder: " + f

        # get filename (it's the same as the folder)
        filename = get_filename(f)
        print "filename: " + f

        # get UID of the file
        uid = sanitize_name(filename)
        print "uid stored layer: " + uid

        # get UID of the stored 3857 projection file
        uid_3857 = sanitize_name(workspace + ":" + filename.rsplit("_", 1)[0])
        print "uid_3857 to search: " + uid_3857

        # get path to the file (+ .geotiff extension)
        path_file = os.path.join(f, filename + ".geotiff")

        print "path_file: " + path_file
        update_published_layer_with_distribution_uid(uid_3857, uid)
Code example #4
def process_files():
    files = glob.glob("/home/vortex/Desktop/LAYERS/GHG_13_NOVEMEBRE/MAGHG-data/OUTPUT/*.tif")
    output_path = "/home/vortex/Desktop/LAYERS/GHG_13_NOVEMEBRE/MAGHG-data/OUTPUT/storage"
    if not os.path.isdir(output_path):
        # shutil.rmtree(output_path)
        os.mkdir(output_path)

    # for each file
    for f in files:
        # get filename
        filename = get_filename(f)
        extension = f.split(".")[1]

        # if _4326 projection
        if filename.endswith("_4326"):
            # create the folder
            folder_to_move_path = create_folder(output_path, filename)

            # move the file to the returned folder path
            final_path = os.path.join(folder_to_move_path, filename + "." + extension)
            print "Final path: " + str(final_path)
            shutil.move(f, final_path)

            # rename the file if it's not yet .geotiff
            if not final_path.endswith(".geotiff"):
                f_geotiff = final_path.replace("." + extension, ".geotiff")
                print "Renaming to: " + str(f_geotiff)
                os.rename(final_path, f_geotiff)
Code example #5
def process_tifs_warp(input_path, output_path, input_ext='tif'):
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    files = glob.glob(input_path + "/*." + input_ext)
    for f in files:
        filename = get_filename(f)
        output_file_path = output_path + "/" + filename + ".tif"
        print output_file_path
        cmd = "gdalwarp -s_srs EPSG:4326 -t_srs EPSG:3857 -co TILED=YES -co COMPRESS=DEFLATE '" + f + "' '" + output_file_path + "'"
        print cmd
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        print output
        print error
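The command above is assembled by string concatenation and run with shell=True, which breaks on paths containing quotes. A sketch of the same gdalwarp invocation with an argument list instead (no change to the GDAL options used):

import subprocess

def warp_to_3857(src_path, dst_path):
    # Same options as above: reproject EPSG:4326 -> EPSG:3857, tiled + DEFLATE output.
    cmd = ["gdalwarp",
           "-s_srs", "EPSG:4326",
           "-t_srs", "EPSG:3857",
           "-co", "TILED=YES",
           "-co", "COMPRESS=DEFLATE",
           src_path, dst_path]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return process.communicate()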
Code example #6
def process_tifs_translate(input_path, output_path, base_str, replace_str):
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    files = glob.glob(input_path + "/*.tif")
    for f in files:
        filename = get_filename(f)
        output_file_path = output_path + "/" + filename.replace(base_str, replace_str) + ".tif"
        print output_file_path
        cmd = "gdal_translate -co TILED=YES -co COMPRESS=DEFLATE '" + f + "' '" + output_file_path + "'"
        print cmd
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
        print output
        print error
        add_gdaladdo(output_file_path)
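add_gdaladdo is not defined in these snippets; presumably it builds raster overviews with GDAL's gdaladdo utility. A plausible sketch (the overview levels and resampling method are assumptions, not the original implementation):

import subprocess

def add_gdaladdo(file_path, levels=(2, 4, 8, 16)):
    # Build overviews with average resampling (assumed defaults).
    cmd = ["gdaladdo", "-r", "average", file_path] + [str(level) for level in levels]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return process.communicate()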
Code example #7
def calc(basepath, output_path, layers, epsg="3857"):
    print "-----Anomaly DPY"


    for layer in layers:
        # print get_year_by_filename(layer)
        year = get_year_by_filename(layer)

        data = None
        month = get_month_by_filename(layer)
        filename = get_filename(layer)
        base_filename = get_base_filename(filename)
        yearPrev = str(int(year) - 1)

        layerPrev = basepath + "/" + base_filename + "_" + yearPrev + month + "_" + epsg + ".tif"

        print "Processing: ", layer, layerPrev

        try:
            print "Reading: ",  layer
            r = rasterio.open(layer)
            r_data = r.read_band(1).astype(float)

            print "Reading: ",  layerPrev
            r_prev = rasterio.open(layerPrev)
            r_prev_data = r_prev.read_band(1).astype(float)

            if data is None:
                data, kwargs = initialize_rasterio_raster(r, rasterio.float32)


            nodata = 0
            index1 = (r_data != nodata)
            index2 = (r_prev_data != nodata)
            r_data = index1 * index2 * r_data
            r_prev_data = index1 * index2 * r_prev_data
            data = r_data - r_prev_data

            # writing
            output_layer_path = output_path + "/" + filename + ".tif"
            print "Writing: ", output_layer_path
            with rasterio.open(output_layer_path, 'w', **kwargs) as dst:
                dst.write_band(1, data.astype(rasterio.float32))
        except Exception, e:
            print e
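read_band, write_band, and the "except Exception, e" syntax tie this snippet to Python 2 and the old rasterio 0.x API. For reference, a minimal sketch of the same masked-difference step with the rasterio 1.x API (output path handling and helper functions unchanged and still assumed):

import rasterio

def anomaly_between(layer, layer_prev, output_layer_path, nodata=0):
    # Difference of two rasters, zeroing cells where either band has nodata (rasterio 1.x).
    with rasterio.open(layer) as r, rasterio.open(layer_prev) as r_prev:
        r_data = r.read(1).astype(float)
        r_prev_data = r_prev.read(1).astype(float)
        kwargs = r.meta.copy()
        kwargs.update(dtype=rasterio.float32)
    mask = (r_data != nodata) & (r_prev_data != nodata)
    data = (r_data - r_prev_data) * mask
    with rasterio.open(output_layer_path, "w", **kwargs) as dst:
        dst.write(data.astype(rasterio.float32), 1)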
Code example #8
def harvest_raster_folder(path, workspace=None):
    # read files
    files = glob.glob(os.path.join(path, "*"))
    for f in files:
        # sanitize file name?
        filename = get_filename(f)
        extension = get_file_extension(f)
        if extension in supported_file:
            log.info(filename)
            metadata = parse_filename(filename, get_authority(f).upper())
            metadata = create_metadata(metadata)

            # check whether a file named like "<file>.json" exists (and, if so, overwrite what is used there)
            #metadata_file =
            print metadata

        if extension in metadata_file:
            log.info(filename)
Code example #9
def publish(input_folder):
    input_files = glob.glob(input_folder + "/*.geotiff")

    # filename
    for input_file in input_files:
        print input_file
        filename = get_filename(input_file)
        filename, projection_code = filename.rsplit("_", 1)
        projection_code = "EPSG:" + projection_code
        product = "test_storage"

        title = filename.replace("_", " ")
        metadata_def = create_metadata(title, product, None, None, projection_code, None, None, True, None, "storage")

        try:
            print metadata_def
            # upload
            # data_manager.publish_coveragestore_storage(input_file, metadata_def, False, False, True)
        except Exception, e:
            print e
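Using rsplit keeps any underscores inside the layer name as part of the title; for example (hypothetical filename, not from the original data):

filename = "forest_cover_4326"
filename, projection_code = filename.rsplit("_", 1)
# filename == "forest_cover", projection_code == "4326" -> "EPSG:4326"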
Code example #10
def process_tifs(base_path, output_path):
    files = glob.glob(base_path + "/*.tif")
    for f in files:
        filename = get_filename(f)
        output_file_path = output_path + "/" + filename + ".tif"
        print output_file_path

        # "-multi": "",
        # "-overwrite": "",
        # "-of": "GTiff",
        # "-s_srs": "'+proj=sinu +R=6371007.181 +nadgrids=@null +wktext'",
        # "-t_srs": "EPSG:3857"
        #
        # "a_nodata": "-3000",
        # "-co": "'TILED=YES'",
        # "-co": "'COMPRESS=DEFLATE'"
        #
        cmd = "gdal_translate -a_nodata -3000 -co TILED=YES -co COMPRESS=DEFLATE '" + f + "' '" + output_file_path + "'"
        print cmd
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        output, error = process.communicate()
Code example #11
    def export_raster_by_spatial_query(self,
                                       user_json,
                                       distribution_url=None,
                                       distribution_folder=None):
        log.info(user_json)
        log.info(self.config)

        # getting distribution folder
        distribution_folder = self._get_distribution_folder(
            distribution_folder)

        # TODO remove dependency from here?
        sq = SpatialQuery(self.config)

        vector_filter = user_json["extract_by"]
        db_options = vector_filter["options"]
        db_datasource = db_options["db"]
        layer_code = db_options["layer"]
        column_code = db_options["column"]
        codes = db_options["codes"]
        email_address = None if "email_address" not in user_json else user_json[
            "email_address"]
        rasters = user_json["raster"]

        log.info(rasters)

        # create a random tmp folder
        zip_folder_id = str(uuid.uuid4()).encode("utf-8")
        zip_folder = os.path.join(distribution_folder, zip_folder_id)
        os.mkdir(zip_folder)

        # create a valid folder name to zip it
        output_folder = os.path.join(zip_folder, "layers")
        os.mkdir(output_folder)

        output_files = []
        for raster in rasters:
            log.info(raster)
            raster_path = get_raster_path(raster)
            log.info(raster_path)
            # turning relative to absolute path if required
            # TODO: handle somehow better (it is used just for test)
            if not os.path.isabs(raster_path):
                # this is used to normalize relative path used during test
                raster_path = os.path.normpath(
                    os.path.join(os.path.dirname(__file__), raster_path))
                log.info(raster_path)
                raster_path = os.path.abspath(raster_path)
            log.info(raster_path)
            srid = get_srid_raster(raster_path)
            log.info(srid)

            # retrieving bounding box
            bbox = sq.query_bbox(db_datasource, layer_code, column_code, codes,
                                 srid)
            log.info(bbox)

            # create the file in the tmp folder
            db = sq.get_db_instance()
            db_connection_string = db.get_connection_string(True)
            query = sq.get_query_string_select_all(db_datasource, layer_code,
                                                   column_code, codes, "*")
            log.info(query)
            filepath = crop_raster_on_vector_bbox_and_postgis_db(
                raster_path, db_connection_string, query, bbox[0][0],
                bbox[0][1], bbox[1][0], bbox[1][1])
            # bounding_box = crop_raster_with_bounding_box(raster_path, bbox[0][0], bbox[0][1], bbox[1][0], bbox[1][1])

            # move file to distribution tmp folder
            path, filename, name = get_filename(filepath, True)
            dst_file = os.path.join(output_folder, filename)
            move(filepath, dst_file)

            # rename file based on uid layer_name (i.e. fenix:trmm_08_2014 -> trmm_08_2014)
            output_filename = get_filename(raster_path) + ".tif"
            output_file = os.path.join(output_folder, output_filename)
            os.rename(dst_file, output_file)

            # saving the output file to zip
            output_files.append(output_file)

        # zip folder or files
        # TODO: change and use make_archive
        #output_filename = os.path.join(zip_folder, zip_filename)
        #make_archive(folder_to_zip, output_filename)
        zip_path = zip_files(zip_filename, output_files, zip_folder)

        # URL to the resource
        if distribution_url is None:
            return zip_path
        else:
            url = distribution_url + zip_folder_id

            # send email if email address
            self._send_email(url, email_address)

            return '{ "url" : "' + url + '"}'
Code example #12
    def export_raster_by_spatial_query(self, user_json, distribution_url=None, distribution_folder=None):
        log.info(user_json)
        log.info(self.config)

        # getting distribution folder
        distribution_folder = self._get_distribution_folder(distribution_folder)

        # TODO remove dependency from here?
        sq = SpatialQuery(self.config)

        vector_filter = user_json["extract_by"]
        db_options = vector_filter["options"]
        db_datasource = db_options["db"]
        layer_code = db_options["layer"]
        column_code = db_options["column"]
        codes = db_options["codes"]
        email_address = None if "email_address" not in user_json else user_json["email_address"]
        rasters = user_json["raster"]

        log.info(rasters)

        # create a random tmp folder
        zip_folder_id = str(uuid.uuid4()).encode("utf-8")
        zip_folder = os.path.join(distribution_folder, zip_folder_id)
        os.mkdir(zip_folder)

        # create a valid folder name to zip it
        output_folder = os.path.join(zip_folder, "layers")
        os.mkdir(output_folder)

        output_files = []
        for raster in rasters:
            log.info(raster)
            raster_path = get_raster_path(raster)
            log.info(raster_path)
            # turning relative to absolute path if required
            # TODO: handle somehow better (it is used just for test)
            if not os.path.isabs(raster_path):
                # this is used to normalize relative path used during test
                raster_path = os.path.normpath(os.path.join(os.path.dirname(__file__), raster_path))
                log.info(raster_path)
                raster_path = os.path.abspath(raster_path)
            log.info(raster_path)
            srid = get_srid_raster(raster_path)
            log.info(srid)

            # retrieving bounding box
            bbox = sq.query_bbox(db_datasource, layer_code, column_code, codes, srid)
            log.info(bbox)

            # create the file in the tmp folder
            db = sq.get_db_instance()
            db_connection_string = db.get_connection_string(True)
            query = sq.get_query_string_select_all(db_datasource, layer_code, column_code, codes, "*")
            log.info(query)
            filepath = crop_raster_on_vector_bbox_and_postgis_db(raster_path, db_connection_string, query, bbox[0][0], bbox[0][1], bbox[1][0], bbox[1][1])
            # bounding_box = crop_raster_with_bounding_box(raster_path, bbox[0][0], bbox[0][1], bbox[1][0], bbox[1][1])

            # move file to distribution tmp folder
            path, filename, name = get_filename(filepath, True)
            dst_file = os.path.join(output_folder, filename)
            move(filepath, dst_file)

            # rename file based on uid layer_name (i.e. fenix:trmm_08_2014 -> trmm_08_2014)
            output_filename = get_filename(raster_path) + ".tif"
            output_file = os.path.join(output_folder, output_filename)
            os.rename(dst_file, output_file)

            # saving the output file to zip
            output_files.append(output_file)

        # zip folder or files
        # TODO: change and use make_archive
        #output_filename = os.path.join(zip_folder, zip_filename)
        #make_archive(folder_to_zip, output_filename)
        zip_path = zip_files(zip_filename, output_files, zip_folder)

        # URL to the resource
        if distribution_url is None:
            return zip_path
        else:
            url = distribution_url + zip_folder_id

            # send email if email address
            self._send_email(url, email_address)

            return '{ "url" : "' + url + '"}'
Code example #13
    def export_vector_by_spatial_query(self, user_json, distribution_url=None, distribution_folder=None):

        vector_filter = user_json["extract_by"]
        db_options = vector_filter["options"]
        db_datasource = db_options["db"]
        layer_code = db_options["layer"]
        column_code = db_options["column"]
        codes = db_options["codes"]
        email_address = None if "email_address" not in user_json else user_json["email_address"]
        vectors = user_json["vector"]


        # getting distribution folder
        distribution_folder = self._get_distribution_folder(distribution_folder)

        # TODO remove dependency from here?
        sq = SpatialQuery(self.config)

        # get file to extract
        output_dirs = []
        for vector in vectors:
            vector_path = get_vector_path(vector)
            srid = get_srid_vector(vector_path)
            #srid = "'merc'"
            log.info(srid)

            # get query
            query = sq.get_query_string_select_all(db_datasource, layer_code, column_code, codes, "ST_Transform(geom, " + srid + ")")
            log.info(query)

            db = sq.get_db_instance()
            db_connection_string = db.get_connection_string(True)
            output_name = get_filename(vector_path)
            output_file_path = crop_vector_on_vector_bbox_and_postgis(vector_path, '"' + db_connection_string + '"', query, output_name)

            # check if filepath exists
            if output_file_path:
                output_dirs.append(os.path.dirname(output_file_path))

        # create a random tmp folder
        zip_folder_id = str(uuid.uuid4()).encode("utf-8")
        zip_folder = os.path.join(distribution_folder, zip_folder_id)
        os.mkdir(zip_folder)

        # create a valid folder name to zip it
        output_folder = os.path.join(zip_folder, "layers")
        os.mkdir(output_folder)

        # move output dirs to distribution folder
        for output_dir in output_dirs:
            for file in glob.glob(os.path.join(output_dir, "*")):
                move(file, output_folder)

        # zip the folder
        tmp_file = create_tmp_filename()
        tmp_zip_path = make_archive(zip_folder, tmp_file)
        # TODO: workaround; strangely, the zip file cannot be created inside the folder being zipped
        zip_path = os.path.join(zip_folder, "layers.zip")
        os.rename(tmp_zip_path, zip_path)
        log.info(zip_path)

        # URL to the resource
        if distribution_url is None:
            return zip_path
        else:
            url = distribution_url + zip_folder_id

            # send email if email address
            self._send_email(url, email_address)

            return '{ "url" : "' + url + '"}'
Code example #14
    def export_vector_by_spatial_query(self,
                                       user_json,
                                       distribution_url=None,
                                       distribution_folder=None):

        vector_filter = user_json["extract_by"]
        db_options = vector_filter["options"]
        db_datasource = db_options["db"]
        layer_code = db_options["layer"]
        column_code = db_options["column"]
        codes = db_options["codes"]
        email_address = None if "email_address" not in user_json else user_json[
            "email_address"]
        vectors = user_json["vector"]

        # getting distribution folder
        distribution_folder = self._get_distribution_folder(
            distribution_folder)

        # TODO remove dependency from here?
        sq = SpatialQuery(self.config)

        # get file to extract
        output_dirs = []
        for vector in vectors:
            vector_path = get_vector_path(vector)
            srid = get_srid_vector(vector_path)
            #srid = "'merc'"
            log.info(srid)

            # get query
            query = sq.get_query_string_select_all(
                db_datasource, layer_code, column_code, codes,
                "ST_Transform(geom, " + srid + ")")
            log.info(query)

            db = sq.get_db_instance()
            db_connection_string = db.get_connection_string(True)
            output_name = get_filename(vector_path)
            output_file_path = crop_vector_on_vector_bbox_and_postgis(
                vector_path, '"' + db_connection_string + '"', query,
                output_name)

            # check if filepath exists
            if output_file_path:
                output_dirs.append(os.path.dirname(output_file_path))

        # create a random tmp folder
        zip_folder_id = str(uuid.uuid4()).encode("utf-8")
        zip_folder = os.path.join(distribution_folder, zip_folder_id)
        os.mkdir(zip_folder)

        # create a valid folder name to zip it
        output_folder = os.path.join(zip_folder, "layers")
        os.mkdir(output_folder)

        # move output dirs to distribution folder
        for output_dir in output_dirs:
            for file in glob.glob(os.path.join(output_dir, "*")):
                move(file, output_folder)

        # zip the folder
        tmp_file = create_tmp_filename()
        tmp_zip_path = make_archive(zip_folder, tmp_file)
        # TODO: workaround; strangely, the zip file cannot be created inside the folder being zipped
        zip_path = os.path.join(zip_folder, "layers.zip")
        os.rename(tmp_zip_path, zip_path)
        log.info(zip_path)

        # URL to the resource
        if distribution_url is None:
            return zip_path
        else:
            url = distribution_url + zip_folder_id

            # send email if email address
            self._send_email(url, email_address)

            return '{ "url" : "' + url + '"}'
Code example #15
def get_base_filename(f):
    filename = get_filename(f)
    s = filename.split("_")
    # return ', '.s[len(s)-3]
    return '_'.join(s[:len(s)-2])
Code example #16
def get_date_by_filename(f):
    filename = get_filename(f)
    s = filename.split("_")
    date = s[len(s)-2]
    return date
Code example #17
def get_month_by_filename(f):
    filename = get_filename(f)
    s = filename.split("_")
    date = s[len(s)-2]
    return date[4:6]
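These last three helpers assume the <base>_<YYYYMM>_<epsg> naming convention produced by the earlier examples. An illustrative call (the filename is made up):

f = "/data/ghg_emissions_201411_3857.tif"   # hypothetical filename
get_base_filename(f)     # -> "ghg_emissions"
get_date_by_filename(f)  # -> "201411"
get_month_by_filename(f) # -> "11"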