Example 1
def defineProj(ds_name):
    # Build a PROJ.4 Albers Equal Area string by picking the PARAMETER
    # values out of the dataset's WKT projection definition.
    ds_proj = Open(ds_name, GA_ReadOnly).GetProjection()
    if not ds_proj:
        return None
    params = ds_proj.split("PARAMETER")

    proj = "+proj=aea +lat_1="
    lat_1 = params[1].split(",")[1][:-1]
    proj = proj + lat_1 + " +lat_2="
    lat_2 = params[2].split(",")[1][:-1]
    proj = proj + lat_2 + " +lat_0="
    lat_0 = params[3].split(",")[1][:-1]
    proj = proj + lat_0 + " +lon_0="
    lon_0 = params[4].split(",")[1][:-1]
    proj = proj + lon_0 + " +x_0="
    x_0 = params[5].split(",")[1][:-1]
    proj = proj + x_0 + " +y_0="
    y_0 = params[6].split(",")[1][:-1]
    proj = proj + y_0

    proj = "\"" + proj + " +ellps=WGS84 +units=m +no_defs\""
    return proj
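Parsing the WKT with string splits assumes a fixed PARAMETER order and an Albers Equal Area projection. A more robust sketch, assuming the osgeo.osr bindings that ship with GDAL, lets the library build the PROJ.4 string:

from osgeo import osr
from gdal import Open, GA_ReadOnly

def define_proj_osr(ds_name):
    # Read the WKT projection and convert it with osr instead of string splits.
    wkt = Open(ds_name, GA_ReadOnly).GetProjection()
    if not wkt:
        return None
    srs = osr.SpatialReference()
    srs.ImportFromWkt(wkt)
    return srs.ExportToProj4()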
Example 2
def convert(abs_path_filename, input_type):
    """Convert native satellite data into a pyramid/tile-friendly GeoTIFF
    format.

    Currently supports:
        HDF
    """

    print("Converting HDF to Gtiff...\n")

    print "abs_path_filename = ", abs_path_filename
    abs_path = '.'.join(abs_path_filename.split('.')[:-1])
    print "abs_path = ", abs_path
    filename = abs_path_filename.split('/')[-1]
    print "filename = ", filename
    if (not os.path.exists(abs_path)):
        os.makedirs(abs_path)
    if (input_type.lower() == "hdf"):
        src_ds = Open(abs_path_filename, GA_ReadOnly)
        src_ds_subsets = src_ds.GetSubDatasets()
        src_proj = "-s_srs"
        t_proj = "-t_srs"
        t_proj_value = "EPSG:4326"

    for subset in src_ds_subsets:
        var_name = subset[0].split(':')[-1]
        dst_filename = '.'.join(
            filename.split(".")[:-1]) + "-" + var_name + ".tiff"
        dst_abs_path_filename = os.path.join(abs_path, dst_filename)
        src_proj_value = defineProj(subset[0])
        call = None
        if src_proj_value is None:
            sub = Open(subset[0])
            #gcp1 = GCP( -180.0, 90.0, 0.0, 0, 0 )
            #gcp2 = GCP( 180.0, -90.0, 0.0, sub.RasterYSize, sub.RasterXSize )
            #sub.SetGCPs( [gcp1,gcp2], "EPSG:4326" )
            #proj = "-a_srs"
            #proj_val = "EPSG:4326"
            georef = "-a_ullr"
            georef_val = "-180.0 90.0 180.0 -90.0"

            #call = " ".join(["gdal_translate",proj,proj_val,georef,georef_val,subset[0],dst_abs_path_filename])
            call = " ".join([
                "gdal_translate", georef, georef_val, subset[0],
                dst_abs_path_filename
            ])
            print "call = ", call
        else:
            call = " ".join([
                "gdalwarp", src_proj, src_proj_value, t_proj, t_proj_value,
                subset[0], dst_abs_path_filename
            ])

        sp.call(shlex.split(call))

    # Close the dataset (setting it to None releases the GDAL handle)
    src_ds = None

    return abs_path
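GDAL 2.1+ also exposes gdalwarp as a Python function, which avoids the subprocess round-trip. A minimal sketch of the reprojection branch above, where subset_name and dst_path are hypothetical placeholders and src_proj_value is the bare PROJ.4 string (without the shell quoting that defineProj adds):

import gdal

def warp_subset(subset_name, dst_path, src_proj_value):
    # Reproject one subdataset to EPSG:4326, mirroring the gdalwarp call above.
    gdal.Warp(dst_path, subset_name, srcSRS=src_proj_value, dstSRS="EPSG:4326")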
Example 3
def main():

    if len(sys.argv) < 6:
        print("Too few arguments")
        sys.exit()

    elif len(sys.argv) > 6:
        print("Too many arguments")
        sys.exit()

    # Tiff paths
    red_path = str(sys.argv[1])
    nir_path = str(sys.argv[2])
    bqa_path = str(sys.argv[3])
    mtl_path = str(sys.argv[4])
    output_path = str(sys.argv[5])
    d_sun_earth_path = 'd_sun_earth'

    # Open red image and get its only band.
    red_tiff = Open(red_path)
    red_band = red_tiff.GetRasterBand(1)

    # Open NIR image and get its only band.
    nir_tiff = Open(nir_path)
    nir_band = nir_tiff.GetRasterBand(1)

    # Open bqa image and get its only band.
    bqa_tiff = Open(bqa_path)
    bqa_band = bqa_tiff.GetRasterBand(1)

    # Get the rows and cols from one of the images (both should always be the same)
    row, col, geotransform = (nir_tiff.RasterYSize, nir_tiff.RasterXSize,
                              nir_tiff.GetGeoTransform())

    # Reading metadata
    parameters = metaReader.readParameters(mtl_path, d_sun_earth_path)

    number_sensor = int(parameters[0])
    julian_day = int(parameters[1])

    sun_elevation = float(parameters[2])
    dist_sun_earth = float(parameters[3])

    mask = get_mask(number_sensor)

    if shadow_check(bqa_band, (col, row), mask):
        print("Invalid inputs. Too much cloud in the tiff images")
        sys.exit()

    ndviC = NDVI(sun_elevation, red_band, nir_band, bqa_band, (col, row),
                 geotransform)
    ndviC.processNDVI(number_sensor, dist_sun_earth, output_path)
Example 4
    def get_timeseries(self):
        images = [
            str(pth) for pth in self._files.values()
            if str(pth).endswith('AnalyticMS_clip.tif')
        ]
        times, ratios = [], []
        for img in images:
            data = Open(img)
            # Ratio of the blue and red band medians for this acquisition
            ratio = get_median(data, "blue") / get_median(data, "red")
            ratios.append(ratio)
            time = datetime.strptime(
                data.GetMetadata_Dict()['TIFFTAG_DATETIME'],
                "%Y:%m:%d %H:%M:%S")
            times.append(time)
        return times, ratios
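get_median is not shown in this snippet. A minimal sketch, assuming the four-band blue/green/red/NIR ordering of PlanetScope AnalyticMS products (the band indexes are an assumption):

import numpy as np

BAND_INDEX = {"blue": 1, "green": 2, "red": 3, "nir": 4}  # assumed band order

def get_median(data_set, band_name):
    # Median value of one band of an open GDAL dataset.
    band = data_set.GetRasterBand(BAND_INDEX[band_name])
    return float(np.median(band.ReadAsArray()))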
Example 5
def read(fName):
    """File reader.

    Parameters
    ----------
    fName : string
        Path of the file to read, including the extension (supports .npy,
        .sav, .pkl, .mat, and GDAL rasters such as .tif/.pix)

    Returns
    -------
    output : the loaded data (type depends on the extension)
    """
    ext = fName[-3:]

    if ext == 'npy':
        return np.load(fName)
    elif ext == 'sav' or ext == 'pkl':
        return pickle.load(open(fName, 'rb'))
    elif ext == 'mat':
        return loadmat(fName)
    elif ext == 'tif' or ext == 'pix':
        return Open(fName).ReadAsArray()
    else:
        print('Unknown filename extension')
        return -1
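A hypothetical usage, with placeholder file names:

dem = read('elevation.tif')    # GDAL raster -> numpy array
model = read('weights.pkl')    # unpickled Python object
samples = read('samples.npy')  # numpy array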
Example 6
def start():
    user_input = input('Enter file path for first_band: ')
    # Open First image and get its only band.
    # first_tiff = Open(r'51/AST_L1B_00308132001191022_20110122163617_24451_ImageData1_b30de2ff.tif')
    first_tiff = Open(user_input)
    first_band = first_tiff.GetRasterBand(1)

    user_input = input('Enter file path for second_band of same image: ')
    # Open Second image and get its only band.
    # second_tiff = Open(r'51/AST_L1B_00308132001191022_20110122163617_24451_ImageData2_b30de2ff.tif')
    second_tiff = Open(user_input)
    second_band = second_tiff.GetRasterBand(1)

    # Release from memory
    user_input = None

    # Get the rows and cols from one of the images (both should always be the same)
    rows, cols, geotransform = first_tiff.RasterYSize, first_tiff.RasterXSize, first_tiff.GetGeoTransform()
    print(geotransform)

    # Set an output for a 16-bit unsigned integer (0-65535)
    out_tiff_int16 = r'TEST-RESULTS/NDVI_INT16.tif'

    # Set the output for a 32-bit floating point (-1 to 1)
    out_tiff_float32 = r'TEST-RESULTS/NDVI_FLOAT32.tif'

    # Run the function for unsigned 16-bit integer
    ndwi(first_band, second_band, rows, cols, geotransform, out_tiff_int16, gdal.GDT_UInt16)

    # Run the function for 32-bit floating point
    ndwi(first_band, second_band, rows, cols, geotransform, out_tiff_float32, gdal.GDT_Float32)

    print('done')
Example 7
def apply_valid_range(input_data_set_path: str, file_path: str) -> str:
    """Apply Valid Range -10000 -> 10000.

    Args:
        input_data_set_path (str) - Path to the input data set
        file_path (str) - Target data set filename
    Returns:
        Path to valid_range_image
    """
    src_ds = GDALOpen(input_data_set_path, GA_ReadOnly)

    if src_ds is None:
        raise ValueError(
            'Could not open data set "{}"'.format(input_data_set_path))

    driver = GetDriverByName('MEM')

    src_band = src_ds.GetRasterBand(1)
    data_set = driver.Create('', src_ds.RasterXSize, src_ds.RasterYSize, 1,
                             src_band.DataType)
    data_set.SetGeoTransform(src_ds.GetGeoTransform())
    data_set.SetProjection(src_ds.GetProjection())

    data_set_band = data_set.GetRasterBand(1)

    data_set_band.WriteArray(src_band.ReadAsArray())

    band_array = data_set_band.ReadAsArray()
    dummy = -9999
    data_set_band.SetNoDataValue(dummy)
    band_array[band_array <= -10000] = dummy
    band_array[band_array >= 10000] = dummy
    driver = GetDriverByName('GTiff')
    data_set_band.WriteArray(band_array)

    dst_ds = driver.CreateCopy(file_path, data_set, options=["COMPRESS=LZW"])

    del dst_ds
    del src_ds
    del data_set

    return file_path
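A hypothetical invocation with placeholder paths; the nodata value of the output can then be verified through the regular GDAL API:

from gdal import Open, GA_ReadOnly

out_path = apply_valid_range('scene_sr_band4.tif', 'scene_sr_band4_valid.tif')
ds = Open(out_path, GA_ReadOnly)
print(ds.GetRasterBand(1).GetNoDataValue())  # expected: -9999.0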
Example 8
    def get_band_data(self,
                      band_index: int,
                      retrieve_uncertainty: bool = True) -> ObservationData:
        data_set_url = self._get_data_set_url(band_index)
        data_set = Open(data_set_url)
        if self._reprojection is not None:
            data_set = self._reprojection.reproject(data_set)
        data = data_set.ReadAsArray()
        mask = data > 0
        data = np.where(mask, data / 10000., self._no_data_values[band_index])

        if retrieve_uncertainty:
            uncertainty = _get_uncertainty(data, mask)
        else:
            uncertainty = None
        band_emulator = self._get_band_emulator(band_index)

        observation_data = ObservationData(observations=data,
                                           uncertainty=uncertainty,
                                           mask=mask,
                                           metadata=self._meta_data_infos,
                                           emulator=band_emulator)
        return observation_data
Example 9
def _convert_to_tif(
    partial_coverage_tiles: List[PartialCoverageTile], wms_crs_code: str
) -> None:
    for tile in partial_coverage_tiles:
        if os.path.exists(tile.wms_path):
            logging.debug(f"Converting {tile.wms_path} to {tile.tif_path}")
            src_file = Open(tile.wms_path, GA_ReadOnly)
            Translate(
                tile.tif_path,
                src_file,
                format="GTiff",
                noData=None,
                outputSRS=wms_crs_code,
                # Translate expects bounds in format ulX, ulY, lrX, lrY so flip minY and maxY
                outputBounds=(tile.x_min, tile.y_max, tile.x_max, tile.y_min),
            )
            if remove_intermediaries():
                os.remove(tile.wms_path)
        else:
            logging.warning(f"Expected file {tile.wms_path} does not exist")
Example 10
import numpy as np
from gdal import Open
from skimage.io import imread  # assumed source of imread in this snippet
from skimage.transform import resize
from skimage.color import rgb2lab, lab2rgb, rgb2gray, gray2rgb
from imgaug import augmenters as iaa
import embedding
import sklearn.feature_extraction.image as im
import cv2


# ========== INPUT ==========
# Define input image dimensions
img_rows, img_cols = 32, 32
# The images are RGB.
img_channels = 3

# Use GDAL Open to read the base RGB WorldView (WV02) map and the corresponding reference label
wv = Open('mapMul.tif')
land = np.zeros((wv.RasterYSize, wv.RasterXSize, 3)).astype('uint16')
land[:, :, 0] = wv.GetRasterBand(5).ReadAsArray()  # swap bands so the order is 1-R, 2-G, 3-B
land[:, :, 1] = wv.GetRasterBand(3).ReadAsArray()
land[:, :, 2] = wv.GetRasterBand(2).ReadAsArray()
del wv  # release the dataset to free memory
label = imread("labelMul.tif")

wv2 = Open('mapMul2.tif')
land2 = np.zeros((wv2.RasterYSize, wv2.RasterXSize, 3)).astype('uint16')
land2[:, :, 0] = wv2.GetRasterBand(5).ReadAsArray()  # swap bands so the order is 1-R, 2-G, 3-B
land2[:, :, 1] = wv2.GetRasterBand(3).ReadAsArray()
land2[:, :, 2] = wv2.GetRasterBand(2).ReadAsArray()
del wv2  # release the dataset to free memory

# Convert to 0-255 valued UINT8 image
Example 11
import gdal
from gdal import Open
from ndvi import ndvi

# Open NIR image and get its only band.
nir_tiff = Open(r'central_catchment_area_vs_urban_area_nir.jpg')
nir_band = nir_tiff.GetRasterBand(1)

# Open red image and get its only band.
red_tiff = Open(r'central_catchment_area_vs_urban_area_redlight.jpg')
red_band = red_tiff.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols, geotransform = (nir_tiff.RasterYSize, nir_tiff.RasterXSize,
                            nir_tiff.GetGeoTransform())
print(geotransform)

# Set an output for a 16-bit unsigned integer (0-65535)
out_tiff_int16 = r'central_catchment_area_vs_urban_area_INT16.tif'

# Set the output for a 32-bit floating point (-1 to 1)
out_tiff_float32 = r'central_catchment_area_vs_urban_area_FLOAT32.tif'

# Run the function for unsigned 16-bit integer
ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff_int16,
     gdal.GDT_UInt16)

# Run the function for 32-bit floating point
ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff_float32,
     gdal.GDT_Float32)
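The ndvi module imported here is not included in these examples. A minimal sketch of a compatible ndvi() helper, assuming the signature used above and the Create/WriteArray/SetGeoTransform output pattern visible in the last example:

import gdal
import numpy as np

def ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff, data_type):
    # Read both bands as floats and compute (NIR - red) / (NIR + red).
    nir = nir_band.ReadAsArray(0, 0, cols, rows).astype(np.float32)
    red = red_band.ReadAsArray(0, 0, cols, rows).astype(np.float32)
    denom = nir + red
    denom[denom == 0] = 1e-6  # avoid division by zero
    result = (nir - red) / denom
    if data_type == gdal.GDT_UInt16:
        # Rescale -1..1 to the 0..65535 integer range (assumed convention).
        result = ((result + 1.0) / 2.0 * 65535).astype(np.uint16)
    driver = gdal.GetDriverByName('GTiff')
    output = driver.Create(out_tiff, cols, rows, 1, data_type)
    output.GetRasterBand(1).WriteArray(result)
    output.SetGeoTransform(geotransform)
    return output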
Example 12
import gdal
from gdal import Open
from ndvi import ndvi

# Open NIR image and get its only band.
nir_tiff = Open(r'infrared_v1_blacknwhite.jpg')
nir_band = nir_tiff.GetRasterBand(1)

# Open red image and get its only band.
red_tiff = Open(r'normal_red_v1.jpg')
red_band = red_tiff.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols, geotransform = nir_tiff.RasterYSize, nir_tiff.RasterXSize, nir_tiff.GetGeoTransform()
print(geotransform)

# Set the output for a 32-bit floating point (-1 to 1)
out_tiff_float32 = r'officeoutside_ndvi_float32.tif'

# Run the function for 32-bit floating point
ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff_float32, gdal.GDT_Float32)

print('done')
Example 13
import gdal
from gdal import Open
from ndvi import ndvi
from ndvi import evi
from ndvi import dvi
from ndvi import ndwi
from ndvi import mndwi
from ndvi import ndbi
from ndvi import savi
from ndvi import ndmi
import xlrd
import openpyxl

nir_tiff = Open(r'NIR_IMAGE.tif')
nir_band = nir_tiff.GetRasterBand(1)
print(nir_band)

red_tiff = Open(r'RED_IMAGE.tif')
red_band = red_tiff.GetRasterBand(1)
print(red_band)
blue_tiff = Open(r'BLUE_IMAGE.tif')
blue_band = blue_tiff.GetRasterBand(1)

green_tiff = Open(r'GREEN_IMAGE.tif')
green_band = green_tiff.GetRasterBand(1)

swir_tiff = Open(r'SWIR_IMAGE.tif')
swir_band = swir_tiff.GetRasterBand(1)

mir_tiff = Open(r'MIR_IMAGE.tif')
Example 14
def mesh_crue10_run(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    # Read the model and its submodels from xml/shp files
    etude = Etude(args.infile_etu)
    modele = etude.get_modele(args.model_name)
    modele.read_all()
    logger.info(modele)
    for sous_modele in modele.liste_sous_modeles:
        sous_modele.remove_sectioninterpolee()
        sous_modele.normalize_geometry()
        logger.info(sous_modele.summary())
        # sous_modele.write_shp_limites_lits_numerotes('limites_lits.shp')  # DEBUG
    logger.info(modele)

    global_mesh_constr = MeshConstructor()

    # Handle branches in minor bed
    for i, branche in enumerate(modele.get_liste_branches()):
        # Skip the branch if branch_patterns is set and none of the patterns match the branch name
        if args.branch_patterns is not None:
            ignore = True
            for pattern in args.branch_patterns:
                if pattern in branche.id:
                    ignore = False
                    break
        else:
            ignore = False

        if branche.type not in args.branch_types_filter or not branche.is_active:
            ignore = True

        if not ignore:
            logger.info("===== TRAITEMENT DE LA BRANCHE %s =====" % branche.id)
            axe = branche.geom
            try:
                section_seq = CrossSectionSequence()
                for crue_section in branche.liste_sections_dans_branche:
                    if isinstance(crue_section, SectionProfil):
                        coords = list(crue_section.get_coord(add_z=True))
                        section = CrossSection(crue_section.id,
                                               [(coord[0], coord[1])
                                                for coord in coords],
                                               'Section')

                        # Determine some variables (constant over the simulation) from the geometry
                        z = np.array([coord[2] for coord in coords])
                        is_bed_active = crue_section.get_is_bed_active_array()
                        mean_strickler = crue_section.get_friction_coeff_array()
                        section.coord.values = np.core.records.fromarrays(
                            np.column_stack(
                                (z, is_bed_active, mean_strickler)).T,
                            names=VARIABLES_FROM_GEOMETRY)

                        section_seq.add_section(section)

                section_seq.compute_dist_proj_axe(axe, args.dist_max)
                if len(section_seq) >= 2:
                    section_seq.check_intersections()
                    # section_seq.sort_by_dist() is useless because profiles are already sorted
                    constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
                        section_seq, args.interp_constraint_lines)

                    mesh_constr = MeshConstructor(
                        section_seq=section_seq,
                        lat_step=args.lat_step,
                        nb_pts_lat=args.nb_pts_lat,
                        interp_values=args.interp_values)
                    mesh_constr.build_interp(constraint_lines, args.long_step,
                                             args.constant_long_disc)
                    mesh_constr.build_mesh(in_floworiented_crs=True)

                    global_mesh_constr.append_mesh_constr(mesh_constr)
                else:
                    logger.warning("Branche ignorée par manque de sections")
            except TatooineException as e:
                logger.error("/!\\ Branch ignored because of a blocking error:")
                logger.error(e.message)
            logger.info("\n")

    # Handle casiers in floodplain
    nb_casiers = len(modele.get_liste_casiers())
    if args.infile_dem and nb_casiers > 0:
        logger.info("===== TRAITEMENT DES CASIERS =====")

        if not os.path.exists(args.infile_dem):
            raise TatooineException("File not found: %s" % args.infile_dem)
        from gdal import Open
        raster = Open(args.infile_dem)
        dem_interp = interp_raster(raster)

        floodplain_step = args.floodplain_step if args.floodplain_step is not None else args.long_step
        max_elem_area = floodplain_step * floodplain_step / 2.0
        simplify_dist = floodplain_step / 2.0

        for i, casier in enumerate(modele.get_liste_casiers()):
            if casier.is_active:
                if casier.geom is None:
                    raise TatooineException(
                        "Geometry of %s could not be found" % casier)
                line = casier.geom.simplify(simplify_dist)
                if not line.is_closed:
                    raise RuntimeError("Geometry of %s is not a closed ring" % casier)
                coords = resample_2d_line(
                    line.coords,
                    floodplain_step)[1:]  # Ignore last duplicated node

                hard_nodes_xy = np.array(coords, dtype=float)
                hard_nodes_idx = np.arange(0, len(hard_nodes_xy), dtype=int)
                hard_segments = np.column_stack(
                    (hard_nodes_idx, np.roll(hard_nodes_idx, 1)))

                tri = {
                    'vertices':
                    np.array(
                        np.column_stack(
                            (hard_nodes_xy[:, 0], hard_nodes_xy[:, 1]))),
                    'segments':
                    hard_segments,
                }
                triangulation = triangle.triangulate(tri,
                                                     opts='qpa%f' %
                                                     max_elem_area)

                nodes_xy = np.array(triangulation['vertices'], dtype=float)
                bottom = dem_interp(nodes_xy)
                points = unstructured_to_structured(np.column_stack(
                    (nodes_xy, bottom)),
                                                    names=['X', 'Y', 'Z'])

                global_mesh_constr.add_floodplain_mesh(triangulation, points)

    if len(global_mesh_constr.points) == 0:
        raise ExceptionCrue10(
            "No points to process; adjust the `--branch_patterns` and/or "
            "`--branch_types_filter` options")

    # General information about the merged mesh
    logger.info(global_mesh_constr.summary())

    if args.infile_rcal:
        # Read rcal result file
        results = RunResults(args.infile_rcal)
        logger.info(results.summary())

        # Check result consistency
        missing_sections = modele.get_missing_active_sections(
            results.emh['Section'])
        if missing_sections:
            raise ExceptionCrue10("Sections manquantes :\n%s" %
                                  missing_sections)

        # Subset results to get requested variables at active sections
        varnames_1d = results.variables['Section']
        logger.info("Variables 1D disponibles aux sections: %s" % varnames_1d)
        try:
            pos_z = varnames_1d.index('Z')
        except ValueError:
            raise TatooineException(
                "Variable Z must be present in the section results")
        if global_mesh_constr.has_floodplain:
            try:
                pos_z_fp = results.variables['Casier'].index('Z')
            except ValueError:
                raise TatooineException(
                    "Variable Z must be present in the casier results")
        else:
            pos_z_fp = None

        pos_variables = [
            results.variables['Section'].index(var) for var in varnames_1d
        ]
        pos_sections_list = [
            results.emh['Section'].index(profil.id)
            for profil in global_mesh_constr.section_seq
        ]
        if global_mesh_constr.has_floodplain:
            pos_casiers_list = [
                results.emh['Casier'].index(casier.id)
                for casier in modele.get_liste_casiers() if casier.is_active
            ]
        else:
            pos_casiers_list = []

        additional_variables_id = ['H']
        if 'Vact' in varnames_1d:
            additional_variables_id.append('M')

        values_geom = global_mesh_constr.interp_values_from_geom()
        z_bottom = values_geom[0, :]
        with Serafin.Write(args.outfile_mesh, args.lang,
                           overwrite=True) as resout:
            title = '%s (written by TatooineMesher)' % os.path.basename(
                args.outfile_mesh)
            output_header = Serafin.SerafinHeader(title=title, lang=args.lang)
            output_header.from_triangulation(
                global_mesh_constr.triangle['vertices'],
                global_mesh_constr.triangle['triangles'] + 1)
            for var_name in VARIABLES_FROM_GEOMETRY:
                if var_name in ['B', 'W']:
                    output_header.add_variable_from_ID(var_name)
                else:
                    output_header.add_variable_str(var_name, var_name, '')
            for var_id in additional_variables_id:
                output_header.add_variable_from_ID(var_id)
            for var_name in varnames_1d:
                output_header.add_variable_str(var_name, var_name, '')
            resout.write_header(output_header)

            if args.calc_unsteady is None:
                for i, calc_name in enumerate(results.calc_steady_dict.keys()):
                    logger.info("~> Calcul permanent %s" % calc_name)
                    # Read a single *steady* calculation
                    res_steady = results.get_res_steady(calc_name)
                    variables_at_profiles = res_steady['Section'][
                        pos_sections_list, :][:, pos_variables]
                    if global_mesh_constr.has_floodplain:
                        z_at_casiers = res_steady['Casier'][pos_casiers_list,
                                                            pos_z_fp]
                    else:
                        z_at_casiers = None

                    # Interpolate between sections and set in casiers
                    values_res = global_mesh_constr.interp_values_from_res(
                        variables_at_profiles, z_at_casiers, pos_z)

                    # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                    depth = np.clip(values_res[pos_z, :] - z_bottom,
                                    a_min=0.0,
                                    a_max=None)

                    # Merge and write values
                    if 'Vact' in varnames_1d:
                        # Compute velocity magnitude from Vact and apply mask "is active bed"
                        velocity = values_res[
                            varnames_1d.index('Vact'), :] * values_geom[1, :]
                        values = np.vstack(
                            (values_geom, depth, velocity, values_res))
                    else:
                        values = np.vstack((values_geom, depth, values_res))

                    resout.write_entire_frame(output_header, 3600.0 * i,
                                              values)

            else:
                calc_unsteady = results.get_calc_unsteady(args.calc_unsteady)
                logger.info("Calcul transitoire %s" % args.calc_unsteady)
                res_unsteady = results.get_res_unsteady(args.calc_unsteady)

                for i, (time, _) in enumerate(calc_unsteady.frame_list):
                    logger.info("~> %fs" % time)
                    res_at_sections = res_unsteady['Section'][i, :, :]
                    variables_at_profiles = res_at_sections[
                        pos_sections_list, :][:, pos_variables]
                    if global_mesh_constr.has_floodplain:
                        z_at_casiers = res_unsteady['Casier'][i,
                                                              pos_casiers_list,
                                                              pos_z_fp]
                    else:
                        z_at_casiers = None

                    # Interpolate between sections
                    values_res = global_mesh_constr.interp_values_from_res(
                        variables_at_profiles, z_at_casiers, pos_z)

                    # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                    depth = np.clip(values_res[pos_z, :] - z_bottom,
                                    a_min=0.0,
                                    a_max=None)

                    # Merge and write values
                    if 'Vact' in varnames_1d:
                        # Compute velocity magnitude from Vact and apply mask "is active bed"
                        velocity = values_res[
                            varnames_1d.index('Vact'), :] * values_geom[1, :]
                        values = np.vstack(
                            (values_geom, depth, velocity, values_res))
                    else:
                        values = np.vstack((values_geom, depth, values_res))

                    resout.write_entire_frame(output_header, time, values)

    else:
        # Write a single frame with only variables from geometry
        global_mesh_constr.export_mesh(args.outfile_mesh, lang=args.lang)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
Example 15
import numpy as np
from gdal import Open, GA_Update
import sys
import os
import cv2
from skimage.io import imread  # assumed source of imread in this snippet
from keras import backend as K
import tensorflow as tf


# ========== INPUT ==========
# Define input image dimensions
img_rows, img_cols = 32, 32
# The images are RGB.
img_channels = 3

# Use GDAL Open to read the base RGB WorldView (WV02) map and the corresponding reference label
wv = Open('mapMul.tif')
land = np.zeros((wv.RasterYSize, wv.RasterXSize, 3)).astype('uint16')
land[:, :, 0] = wv.GetRasterBand(5).ReadAsArray()  # swap bands so the order is 1-R, 2-G, 3-B
land[:, :, 1] = wv.GetRasterBand(3).ReadAsArray()
land[:, :, 2] = wv.GetRasterBand(2).ReadAsArray()
del wv  # release the dataset to free memory
label = imread("labelMul.tif")

# Convert to 0-255 valued UINT8 image
land = cv2.convertScaleAbs(land, alpha=255.0 / np.max(land))
slum = (label > 0).astype('uint8')
del label  # delete variables to save memory resources
#plt.imshow(slum, 'gray')
land_slum = np.dstack((land, slum))  # stack the map and label for drawing training patches

# Sample from slum blocks according to the size of connected slum blocks
Example 16
import gdal
from gdal import Open
from comparison import comparison

# Open the 'before' image and get its only band.
before_ndvi = Open(r'x')  # input the desired image file name for x
before_ndvi_band = before_ndvi.GetRasterBand(1)

# Open the 'after' image and get its only band.
after_ndvi = Open(r'y')  # input the desired image file name for y
after_ndvi_band = after_ndvi.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols = before_ndvi.RasterYSize, before_ndvi.RasterXSize

# Run the function for 32-bit floating point
comparison(before_ndvi_band, after_ndvi_band, rows, cols)

print('done')
Example 17
def publish(collection_item: CollectionItem, scene: RadcorActivity):
    """Publish Landsat collection.

    It works with both Digital Number (DN) and Surface Reflectance (SR).

    Args:
        collection_item - Collection Item
        scene - Current Activity
    """
    identifier = scene.sceneid

    # Get collection level to publish. Default is l1
    collection_level = scene.args.get('level') or 1

    landsat_scene = factory.get_from_sceneid(identifier,
                                             level=collection_level)

    productdir = scene.args.get('file')

    logging.warning('Publish {} - {} (id={})'.format(scene.collection_id,
                                                     productdir, scene.id))

    if productdir and productdir.endswith('.gz'):
        target_dir = landsat_scene.path()
        makedirs(target_dir, exist_ok=True)

        productdir = uncompress(productdir, str(target_dir))

    collection = Collection.query().filter(
        Collection.id == collection_item.collection_id).one()
    quicklook = collection.bands_quicklook.split(
        ',') if collection.bands_quicklook else DEFAULT_QUICK_LOOK_BANDS

    files = {}
    qlfiles = {}

    bands = landsat_scene.get_band_map()

    for gband, band in bands.items():
        fs = landsat_scene.get_files()

        if not fs:
            continue

        for f in fs:
            if f.stem.endswith(band) and f.suffix.lower().endswith('.tif'):
                files[gband] = f
                if gband in quicklook:
                    qlfiles[gband] = str(f)

    # Generate Vegetation Index files
    generate_vi(productdir, files)

    # Apply valid range and Cog files
    for band, file_path in files.items():
        tif_file = str(file_path)

        if landsat_scene.level == 2:
            _ = apply_valid_range(tif_file, tif_file)

        # Set destination of COG file
        files[band] = generate_cogs(tif_file, tif_file)
        if not is_valid_tif(tif_file):
            raise RuntimeError('Not Valid {}'.format(tif_file))

    # Extract basic scene information and build the quicklook
    pngname = productdir + '/{}.png'.format(identifier)

    dataset = GDALOpen(qlfiles['nir'], GA_ReadOnly)
    numlin = 768
    numcol = int(
        float(dataset.RasterXSize) / float(dataset.RasterYSize) * numlin)
    del dataset

    create_quick_look(pngname,
                      [qlfiles[band] for band in quicklook if band in qlfiles],
                      rows=numlin,
                      cols=numcol)

    productdir = productdir.replace(Config.DATA_DIR, '')

    assets_to_upload = {
        'quicklook':
        dict(file=pngname, asset=productdir.replace('/Repository/Archive', ''))
    }

    for instance in ['local', 'aws']:
        engine_instance = {'local': db, 'aws': db_aws}
        engine = engine_instance[instance]

        # Skip catalog on aws for digital number
        if landsat_scene.level == 1 and instance == 'aws':
            continue

        if instance == 'aws':
            asset_url = productdir.replace('/Repository/Archive',
                                           Config.AWS_BUCKET_NAME)
        else:
            asset_url = productdir

        pngname = resource_path.join(asset_url, Path(pngname).name)

        assets_to_upload['quicklook']['asset'] = pngname

        with engine.session.begin_nested():
            with engine.session.no_autoflush:
                # Add collection item to the session if not present
                if collection_item not in engine.session:
                    item = engine.session.query(CollectionItem).filter(
                        CollectionItem.id == collection_item.id).first()

                    if not item:
                        cloned_properties = CollectionItemForm().dump(
                            collection_item)
                        collection_item = CollectionItem(**cloned_properties)
                        engine.session.add(collection_item)

                collection_item.quicklook = pngname

                collection_bands = engine.session.query(Band)\
                    .filter(Band.collection_id == collection_item.collection_id)\
                    .all()

                # Inserting data into Product table
                for band in files:
                    template = resource_path.join(asset_url,
                                                  Path(files[band]).name)

                    dataset = GDALOpen(files[band], GA_ReadOnly)
                    asset_band = dataset.GetRasterBand(1)

                    chunk_x, chunk_y = asset_band.GetBlockSize()

                    band_model = next(
                        filter(lambda b: band == b.common_name,
                               collection_bands), None)

                    if not band_model:
                        logging.warning(
                            'Band {} of collection {} not found in database. Skipping...'
                            .format(band, collection_item.collection_id))
                        continue

                    defaults = dict(url=template,
                                    source=landsat_scene.source(),
                                    raster_size_x=dataset.RasterXSize,
                                    raster_size_y=dataset.RasterYSize,
                                    raster_size_t=1,
                                    chunk_size_t=1,
                                    chunk_size_x=chunk_x,
                                    chunk_size_y=chunk_y)

                    asset, _ = get_or_create_model(
                        Asset,
                        engine=engine,
                        defaults=defaults,
                        collection_id=scene.collection_id,
                        band_id=band_model.id,
                        grs_schema_id=scene.collection.grs_schema_id,
                        tile_id=collection_item.tile_id,
                        collection_item_id=collection_item.id,
                    )
                    asset.url = defaults['url']

                    assets_to_upload[band] = dict(file=files[band],
                                                  asset=asset.url)

                    # Add into scope of local and remote database
                    add_instance(engine, asset)

        # Persist database
        commit(engine)

    return assets_to_upload
Example 18
import numpy as np
import matplotlib.pyplot as plt
from gdal import Open as OpenGdal

#-------------------------------------------------------------------------------

fn0 = '../data/SVDNB_npp_20150101-20151231_75N060W_{}_v10_c201701311200.avg_rade9.tif'

# vcm - viirs cloud mask
# vcm-orm = outlier removed
# vcm-ntl = background (non-lights) removed
# vcm-orm-ntl = both
gd = OpenGdal(fn0.format('vcm-orm-ntl'))

print(gd.RasterXSize, gd.RasterYSize)
gt = gd.GetGeoTransform()

#-------------------------------------------------------------------------------

# Helpers mapping pixel coordinates to (lat, lon) and back via the geotransform
xy2geo = lambda g, x, y: (g[3] + g[5] * y, g[0] + g[1] * x)
geo2xy = lambda g, f, l: ((l - g[0]) / g[1], (f - g[3]) / g[5])

#-------------------------------------------------------------------------------

# geo0 = 51.11, 17.03 # wroclaw
# geo0 = 52.22, 21.01 # warszawa
# geo0 = 50.816, 15.383 # Orle
# geo0 = 50.846015, 16.698650 # tapadla
geo0 = 50.995681, 16.901729
geor = 0.6
f0, l0 = geo0[0] + geor, geo0[1] - geor
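An illustrative round trip with the two helpers above:

x, y = geo2xy(gt, *geo0)     # (lat, lon) -> fractional pixel coordinates
lat, lon = xy2geo(gt, x, y)  # and back to geographic coordinates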
Example 19
import gdal
from gdal import Open
from comparison import comparison

# Open the 'before' image and get its only band.
before_ndvi = Open(r'officeoutside_ndvi_float32.tif')
before_ndvi_band = before_ndvi.GetRasterBand(1)

# Open the 'after' image and get its only band.
after_ndvi = Open(r'outsideoffice_v2_ndvi_float32.tif')
after_ndvi_band = after_ndvi.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols = before_ndvi.RasterYSize, before_ndvi.RasterXSize

# Run the function for 32-bit floating point
comparison(before_ndvi_band, after_ndvi_band, rows, cols)

print('done')
Example 20
import gdal
from gdal import Open
from ndvi import ndvi

# Open NIR image and get its only band.
nir_tiff = Open(r'nir.tif')
nir_band = nir_tiff.GetRasterBand(1)

# Open red image and get its only band.
red_tiff = Open(r'red.tif')
red_band = red_tiff.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols, geotransform = (nir_tiff.RasterYSize, nir_tiff.RasterXSize,
                            nir_tiff.GetGeoTransform())
print(geotransform)

# Set an output for a 16-bit unsigned integer (0-65535)
out_tiff_int16 = r'NDVI_INT16.tif'

# Run the function for unsigned 16-bit integer
ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff_int16,
     gdal.GDT_UInt16)

print('done')
Example 21
def publish(collection_item: CollectionItem, scene: RadcorActivity):
    """Publish Landsat collection.

    It works with both Digital Number (DN) and Surface Reflectance (SR).

    Args:
        collection_item - Collection Item
        scene - Current Activity
    """
    identifier = scene.sceneid
    cc = identifier.split('_')
    pathrow = cc[2]
    date = cc[3]
    yyyymm = '{}-{}'.format(date[0:4], date[4:6])

    productdir = scene.args.get('file')

    logging.warning('Publish {} - {} (id={})'.format(scene.collection_id,
                                                     productdir, scene.id))

    if productdir and productdir.endswith('.gz'):
        target_dir = Path(
            Config.DATA_DIR) / 'Repository/Archive/{}/{}/{}'.format(
                collection_item.collection_id, yyyymm, pathrow)
        makedirs(target_dir, exist_ok=True)

        productdir = uncompress(productdir, str(target_dir))

    collection = Collection.query().filter(
        Collection.id == collection_item.collection_id).one()
    quicklook = collection.bands_quicklook.split(
        ',') if collection.bands_quicklook else DEFAULT_QUICK_LOOK_BANDS

    files = {}
    qlfiles = {}

    if collection.id == 'LC8DN':
        bands = BAND_MAP_DN
    elif collection.id == 'LC8NBAR':
        bands = BAND_MAP_NBAR
    else:
        bands = BAND_MAP_SR

    for gband, band in bands.items():
        template = productdir + '/LC08_*_{}_{}_*_{}.*'.format(
            pathrow, date, band)
        fs = glob.glob(template)

        if not fs:
            continue

        for f in fs:
            if f.lower().endswith('.tif'):
                files[gband] = f
                if gband in quicklook:
                    qlfiles[gband] = f

    # Generate EVI/NDVI only for DN and NBAR products; for Surface Reflectance
    # the espa-science processing has already produced them
    if collection.id == 'LC8DN' or collection.id == 'LC8NBAR':
        generate_vi(productdir, files)

    # Apply valid range and Cog files
    for band, file_path in files.items():
        if collection.id == 'LC8SR':
            _ = apply_valid_range(file_path, file_path)
        # Set destination of COG file
        files[band] = generate_cogs(file_path, file_path)
        if not is_valid_tif(file_path):
            raise RuntimeError('Not Valid {}'.format(file_path))

    # Extract basic scene information and build the quicklook
    pngname = productdir + '/{}.png'.format(identifier)

    dataset = GDALOpen(qlfiles['nir'], GA_ReadOnly)
    numlin = 768
    numcol = int(
        float(dataset.RasterXSize) / float(dataset.RasterYSize) * numlin)
    image = numpy.zeros((
        numlin,
        numcol,
        len(qlfiles),
    ), dtype=numpy.uint8)

    del dataset

    nb = 0
    for band in quicklook:
        template = qlfiles[band]
        dataset = GDALOpen(template, GA_ReadOnly)
        raster = dataset.GetRasterBand(1).ReadAsArray(0, 0,
                                                      dataset.RasterXSize,
                                                      dataset.RasterYSize)

        del dataset

        raster = resize(raster, (numlin, numcol), order=1, preserve_range=True)
        nodata = raster == -9999
        # Evaluate minimum and maximum values
        a = numpy.array(raster.flatten())
        p1, p99 = numpy.percentile(a[a > 0], (1, 99))
        # Convert minimum and maximum values to 1,255 - 0 is nodata
        raster = exposure.rescale_intensity(raster,
                                            in_range=(p1, p99),
                                            out_range=(1, 255)).astype(
                                                numpy.uint8)
        image[:, :, nb] = raster.astype(numpy.uint8) * numpy.invert(nodata)
        nb += 1

    write_png(pngname, image, transparent=(0, 0, 0))

    productdir = productdir.replace(Config.DATA_DIR, '')

    assets_to_upload = {
        'quicklook':
        dict(file=pngname, asset=productdir.replace('/Repository/Archive', ''))
    }

    for instance in ['local', 'aws']:
        engine_instance = {'local': db, 'aws': db_aws}
        engine = engine_instance[instance]

        # Skip catalog on aws for digital number
        if collection_item.collection_id == 'LC8DN' and instance == 'aws':
            continue

        if instance == 'aws':
            asset_url = productdir.replace('/Repository/Archive',
                                           Config.AWS_BUCKET_NAME)
        else:
            asset_url = productdir

        pngname = resource_path.join(asset_url, Path(pngname).name)

        assets_to_upload['quicklook']['asset'] = pngname

        with engine.session.begin_nested():
            with engine.session.no_autoflush:
                # Add collection item to the session if not present
                if collection_item not in engine.session:
                    item = engine.session.query(CollectionItem).filter(
                        CollectionItem.id == collection_item.id).first()

                    if not item:
                        cloned_properties = CollectionItemForm().dump(
                            collection_item)
                        collection_item = CollectionItem(**cloned_properties)
                        engine.session.add(collection_item)

                collection_item.quicklook = pngname

                collection_bands = engine.session.query(Band).filter(
                    Band.collection_id == collection_item.collection_id).all()

                # Inserting data into Product table
                for band in files:
                    template = resource_path.join(asset_url,
                                                  Path(files[band]).name)

                    dataset = GDALOpen(files[band], GA_ReadOnly)
                    asset_band = dataset.GetRasterBand(1)

                    chunk_x, chunk_y = asset_band.GetBlockSize()

                    band_model = next(
                        filter(lambda b: band == b.common_name,
                               collection_bands), None)

                    if not band_model:
                        logging.warning(
                            'Band {} of collection {} not found in database. Skipping...'
                            .format(band, collection_item.collection_id))
                        continue

                    defaults = dict(url=template,
                                    source=cc[0],
                                    raster_size_x=dataset.RasterXSize,
                                    raster_size_y=dataset.RasterYSize,
                                    raster_size_t=1,
                                    chunk_size_t=1,
                                    chunk_size_x=chunk_x,
                                    chunk_size_y=chunk_y)

                    asset, _ = get_or_create_model(
                        Asset,
                        engine=engine,
                        defaults=defaults,
                        collection_id=scene.collection_id,
                        band_id=band_model.id,
                        grs_schema_id=scene.collection.grs_schema_id,
                        tile_id=collection_item.tile_id,
                        collection_item_id=collection_item.id,
                    )
                    asset.url = defaults['url']

                    assets_to_upload[band] = dict(file=files[band],
                                                  asset=asset.url)

                    # Add into scope of local and remote database
                    add_instance(engine, asset)

        # Persist database
        commit(engine)

    return assets_to_upload
Example 22
import gdal
from gdal import Open
from ndvi import ndvi


# Open the NIR band
nir_tiff = Open(r'NIR_IMAGE.tif')
#nir_tiff = Open(r'nir.tif')
nir_band = nir_tiff.GetRasterBand(1)

# Open the RED band.
red_tiff = Open(r'RED_IMAGE.tif')
#red_tiff = Open(r'red.tif')
red_band = red_tiff.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols, geotransform = nir_tiff.RasterYSize, nir_tiff.RasterXSize, nir_tiff.GetGeoTransform()
print(geotransform)

# 16-bit integer output file (0-65535)
out_tiff_int16 = r'NDVI_16.tif'

# 32-bit floating point output file (-1 to 1)
out_tiff_float32 = r'NDVI_32.tif'

# Generate the 16-bit integer NDVI image
ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff_int16, gdal.GDT_UInt16)

# Generate the 32-bit floating point NDVI image
ndvi(nir_band, red_band, rows, cols, geotransform, out_tiff_float32, gdal.GDT_Float32)
Example 23
        output = geotiff.Create(out_tiff, in_cols, in_rows, 1, gdal.GDT_UInt16)
        output.GetRasterBand(1).WriteArray(ndvi_int16)
    elif data_type == gdal.GDT_Float32:
        output = geotiff.Create(out_tiff, in_cols, in_rows, 1, gdal.GDT_Float32)
        output.GetRasterBand(1).WriteArray(ndvi_float32)
    else:
        raise ValueError('Invalid output data type. Valid types are gdal.GDT_UInt16 or gdal.GDT_Float32.')

    # Set the geographic transformation as the input.
    output.SetGeoTransform(in_geotransform)

    # return the output image in case you want to do something else with it.
    return output

# Open NIR image and get its only band.
nir_tiff = Open(r'D:\landsat_data\20160521_halifax\sample_data_for_tool\NIR_IMAGE.tif')
nir_band = nir_tiff.GetRasterBand(1)

# Open red image and get its only band.
red_tiff = Open(r'D:\landsat_data\20160521_halifax\sample_data_for_tool\RED_IMAGE.tif')
red_band = red_tiff.GetRasterBand(1)

# Get the rows and cols from one of the images (both should always be the same)
rows, cols, geotransform = nir_tiff.RasterYSize, nir_tiff.RasterXSize, nir_tiff.GetGeoTransform()
print(geotransform)

# Set an output for a 16-bit unsigned integer (0-65535)
out_tiff_int16 = r'D:\landsat_data\20160521_halifax\sample_data_for_tool\NDVI_INT16.tif'

# Set the output for a 32-bit floating point (-1 to 1)
out_tiff_float32 = r'D:\landsat_data\20160521_halifax\sample_data_for_tool\NDVI_FLOAT32.tif'