def arrays2raster(raster_filename, xy_raster_origin, dx, dy, array_list):
    nb_var = len(array_list)
    nb_rows, nb_cols = array_list[0][1].shape
    logger.info("Regular grid size : %i rows x %i columns" % (nb_rows, nb_cols))

    origin_x = xy_raster_origin[0]
    origin_y = xy_raster_origin[1]

    driver = gdal.GetDriverByName('GTiff')
    out_raster = driver.Create(raster_filename, nb_cols, nb_rows, nb_var, gdal.GDT_Float64)

    # Set grid and EPSG if necessary
    out_raster.SetGeoTransform((origin_x, dx, 0, origin_y, 0, dy))
    if args.epsg is not None:  # EPSG attribution seems buggy
        out_raster_srs = osr.SpatialReference()
        out_raster_srs.ImportFromEPSG(args.epsg)
        out_raster.SetProjection(out_raster_srs.ExportToWkt())

    # Add one band per variable
    for i_var, (var_ID, array) in enumerate(array_list):
        if array.shape != (nb_rows, nb_cols):
            raise RuntimeError
        outband = out_raster.GetRasterBand(i_var + 1)
        outband.SetDescription(var_ID)
        outband.WriteArray(array)
        outband.FlushCache()

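# Hedged usage sketch for arrays2raster (not part of the original tool): it only illustrates
# the call convention. The band names, the 10 x 20 arrays and the file name 'demo.tif' are
# made up, and calling it still requires the module-level `args.epsg` read inside
# arrays2raster. The raster origin is the top-left corner of the grid and dy is negative,
# so that row 0 of each array is written as the northernmost raster row.
def _arrays2raster_usage_sketch():
    demo_bands = [('B', np.zeros((10, 20))), ('H', np.ones((10, 20)))]
    arrays2raster('demo.tif', (0.0, 50.0), 5.0, -5.0, demo_bands)
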
def slf_base(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        output_header = resin.header.copy()

        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Set mesh origin coordinates
        if args.set_mesh_origin:
            output_header.set_mesh_origin(args.set_mesh_origin[0], args.set_mesh_origin[1])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')

        # Remove variables if necessary
        if args.var2del:
            output_header.empty_variables()
            for var_ID, var_name, var_unit in zip(resin.header.var_IDs, resin.header.var_names,
                                                  resin.header.var_units):
                if var_ID not in args.var2del:
                    output_header.add_variable(var_ID, var_name, var_unit)

        # Add new derived variables
        if args.var2add is not None:
            for var_ID in args.var2add:
                if var_ID in output_header.var_IDs:
                    logger.warn('Variable %s is already present (or was already requested)' % var_ID)
                else:
                    output_header.add_variable_from_ID(var_ID)

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs, output_header.var_IDs,
                                                      is_2d=resin.header.is_2d, us_equation=us_equation)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            for time_index, time in tqdm(resin.subset_time(args.start, args.end, args.ech), unit='frame'):
                values = do_calculations_in_frame(necessary_equations, resin, time_index, output_header.var_IDs,
                                                  output_header.np_float_type, is_2d=output_header.is_2d,
                                                  us_equation=us_equation, ori_values={})
                resout.write_entire_frame(output_header, time, values)

def slf_to_raster(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        header = resin.header
        logger.info(header.summary())
        resin.get_time()

        if args.vars is None:
            var_names = [var_name.decode('utf-8') for var_name in header.var_names]
            var_IDs = header.var_IDs
        else:
            var_names = []
            var_IDs = []
            for var_ID, var_name in zip(header.var_IDs, header.var_names):
                if var_ID in args.vars:
                    var_names.append(var_name.decode('utf-8'))
                    var_IDs.append(var_ID)

        # Shift mesh coordinates if necessary
        if args.shift:
            header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Build output regular grid and matplotlib triangulation of the mesh
        m_xi, m_yi = np.meshgrid(np.arange(header.x.min(), header.x.max(), args.resolution),
                                 np.arange(header.y.min(), header.y.max(), args.resolution))
        triang = mtri.Triangulation(header.x, header.y, triangles=header.ikle_2d - 1)

        # Build list containing all interpolated variables on the regular grid
        array_list = []
        for i, (var_ID, var_name) in enumerate(zip(var_IDs, var_names)):
            values = resin.read_var_in_frame(args.frame_index, var_ID)
            interp = mtri.LinearTriInterpolator(triang, values)
            data = interp(m_xi, m_yi)[::-1]  # reverse array so the tif looks like the array
            array_list.append((var_name, data))
            logger.info("Min and max values for interpolated %s variable: [%f, %f]"
                        % (var_name, data.min(), data.max()))

        # Write data in the raster output file
        arrays2raster(args.out_tif, (header.x.min(), header.y.max()),
                      args.resolution, -args.resolution, array_list)

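# Self-contained sketch of the mesh-to-grid interpolation used in slf_to_raster, on a toy
# two-triangle mesh (all values are illustrative). LinearTriInterpolator returns a masked
# array: grid cells falling outside the triangulation are masked, which is why the
# min()/max() logged above ignore them.
def _grid_interpolation_sketch():
    x = np.array([0.0, 1.0, 1.0, 0.0])
    y = np.array([0.0, 0.0, 1.0, 1.0])
    triangles = np.array([[0, 1, 2], [0, 2, 3]])  # already 0-based (ikle_2d above is 1-based)
    nodal_values = np.array([0.0, 1.0, 2.0, 1.0])
    triang = mtri.Triangulation(x, y, triangles=triangles)
    xi, yi = np.meshgrid(np.arange(0.0, 1.0, 0.25), np.arange(0.0, 1.0, 0.25))
    interp = mtri.LinearTriInterpolator(triang, nodal_values)
    return interp(xi, yi)[::-1]  # reversed so row 0 is the northernmost row, as in the GeoTiff
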
def slf_last(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        output_header = resin.header.copy()

        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')

        values = np.empty((output_header.nb_var, output_header.nb_nodes), dtype=output_header.np_float_type)
        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            time_index = len(resin.time) - 1
            time = resin.time[-1] if args.time is None else args.time

            for i, var_ID in enumerate(output_header.var_IDs):
                values[i, :] = resin.read_var_in_frame(time_index, var_ID)
            resout.write_entire_frame(output_header, time, values)

def slf_int2d(args):
    # Read set of points file
    fields, indices = Shapefile.get_attribute_names(args.in_points)
    points = []
    attributes = []
    for point, attribute in Shapefile.get_points(args.in_points, indices):
        points.append(point)
        attributes.append(attribute)

    if not points:
        logger.critical('The Shapefile does not contain any point.')
        sys.exit(1)

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        resin.get_time()

        output_header = resin.header.copy()

        mesh = MeshInterpolator(output_header, True)
        is_inside, point_interpolators = mesh.get_point_interpolators(points)
        nb_inside = sum(map(int, is_inside))

        if nb_inside == 0:
            logger.critical('No point inside the mesh.')
            sys.exit(3)
        logger.debug('The file contains {} point{}. {} point{} inside the mesh'.format(
            len(points), 's' if len(points) > 1 else '',
            nb_inside, 's are' if nb_inside > 1 else ' is'))

        var_IDs = output_header.var_IDs if args.vars is None else args.vars

        mode = 'w' if args.force else 'x'
        with open(args.out_csv, mode, newline='') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=args.sep)

            header = ['time_id', 'time']
            if args.long:
                header = header + ['point_id', 'point_x', 'point_y', 'variable', 'value']
            else:
                for pt_id, (x, y) in enumerate(points):
                    for var in var_IDs:
                        header.append('Point %d %s (%s|%s)' % (pt_id + 1, var, settings.FMT_COORD.format(x),
                                                               settings.FMT_COORD.format(y)))
            csvwriter.writerow(header)

            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                values = [time_index, time]

                for var_ID in var_IDs:
                    var = resin.read_var_in_frame(time_index, var_ID)

                    for pt_id, (point, point_interpolator) in enumerate(zip(points, point_interpolators)):
                        if args.long:
                            values_long = values + [str(pt_id + 1)] + [settings.FMT_COORD.format(x) for x in point]

                        if point_interpolator is None:
                            if args.long:
                                csvwriter.writerow(values_long + [var_ID, settings.NAN_STR])
                            else:
                                values.append(settings.NAN_STR)
                        else:
                            (i, j, k), interpolator = point_interpolator
                            int_value = settings.FMT_FLOAT.format(interpolator.dot(var[[i, j, k]]))
                            if args.long:
                                csvwriter.writerow(values_long + [var_ID, int_value])
                            else:
                                values.append(int_value)

                if not args.long:
                    csvwriter.writerow(values)

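# As used in slf_int2d above, each point interpolator carries the indices (i, j, k) of the
# nodes of the enclosing triangle together with the point's barycentric weights; the
# interpolated value is simply the weighted sum of the three nodal values. Toy illustration
# (all numbers are arbitrary):
def _barycentric_interpolation_sketch():
    nodal_values = np.array([2.0, 4.0, 6.0, 8.0])  # one value per mesh node
    i, j, k = 0, 2, 3                              # node indices of the enclosing triangle
    weights = np.array([0.5, 0.25, 0.25])          # barycentric coordinates of the point
    return weights.dot(nodal_values[[i, j, k]])    # 0.5*2.0 + 0.25*6.0 + 0.25*8.0 = 4.5
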
def slf_sedi_chain(args):
    # Check that float parameters are positive (especially ws!)
    for arg in ('Cmud', 'ws', 'C', 'M'):
        if getattr(args, arg) < 0:
            logger.critical('The argument %s has to be positive' % arg)
            sys.exit(1)

    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs, ['TAU'],
                                                      is_2d=True, us_equation=us_equation)

        if resin.header.nb_frames < 1:
            logger.critical('The input file must have at least one frame!')
            sys.exit(1)

        output_header = resin.header.copy()

        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')

        output_header.empty_variables()
        output_header.add_variable_from_ID('B')
        output_header.add_variable_from_ID('EV')

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            prev_time = None
            prev_tau = None
            initial_bottom = resin.read_var_in_frame(0, 'B')
            bottom = copy(initial_bottom)
            for time_index, time in enumerate(resin.time):
                tau = do_calculations_in_frame(necessary_equations, resin, time_index, ['TAU'],
                                               output_header.np_float_type, is_2d=True,
                                               us_equation=us_equation, ori_values={})[0]
                if prev_time is not None:
                    dt = time - prev_time
                    mean_tau = (prev_tau + tau) / 2
                    if args.Tcd > 0:
                        bottom += args.Cmud * args.ws * args.C * \
                                  (1 - np.clip(mean_tau / args.Tcd, a_min=None, a_max=1.)) * dt
                    if args.Tce > 0:
                        bottom -= args.Cmud * args.M * \
                                  (np.clip(mean_tau / args.Tce, a_min=1., a_max=None) - 1.) * dt

                evol_bottom = bottom - initial_bottom
                resout.write_entire_frame(output_header, time, np.vstack((bottom, evol_bottom)))

                prev_time = time
                prev_tau = tau

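# The bottom update in slf_sedi_chain is an explicit time integration of classical
# cohesive-sediment laws: Krone-type deposition below the critical shear stress Tcd and
# Partheniades-type erosion above Tce, driven by the shear stress averaged over two
# consecutive frames. Equivalent single-node sketch of one time step (scalar arithmetic,
# same clipping behaviour as the np.clip calls above):
def _bed_evolution_step_sketch(bottom, mean_tau, dt, Cmud, ws, C, M, Tcd, Tce):
    deposition = Cmud * ws * C * max(0.0, 1.0 - mean_tau / Tcd) * dt if Tcd > 0 else 0.0
    erosion = Cmud * M * max(0.0, mean_tau / Tce - 1.0) * dt if Tce > 0 else 0.0
    return bottom + deposition - erosion
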
def ADCP_comp(args):
    x_mes = []
    y_mes = []
    cord_mes = open(args.inADCP_GPS).read().splitlines()
    for x_l in cord_mes:
        y, x = x_l.split(',')
        if x == NODATA or y == NODATA:
            print("Warning: one point is missing")
        else:
            x_mes.append(x)
            y_mes.append(y)
    x_mes = [float(a) for a in x_mes]
    y_mes = [float(a) for a in y_mes]
    inProj = Proj("+init=EPSG:%i" % args.inEPSG)
    outProj = Proj("+init=EPSG:%i" % args.outEPSG)
    x_mes, y_mes = transform(inProj, outProj, x_mes, y_mes)

    SCHEMA = {'geometry': 'LineString', 'properties': {'nom': 'str'}}
    with fiona.open(args.outADCP_GPS, 'w', 'ESRI Shapefile', SCHEMA, crs=from_epsg(args.outEPSG)) as out_shp:
        Ltest = LineString([(x_2, y_2) for x_2, y_2 in zip(x_mes, y_mes)])
        elem = {}
        elem['geometry'] = mapping(Ltest)
        elem['properties'] = {'nom': 'ADCP line'}
        out_shp.write(elem)

    p_raw = RawProfileObj(args.inADCP)
    processing_settings = {'proj_method': 2}
    startingpoint = dict(start=Vector(0, 0))
    p0 = ProcessedProfileObj(p_raw, processing_settings, startingpoint)
    profile_averaged = averaging.get_averaged_profile(p0, cfg={'order': 15})
    header = 'X;Y;Uadcp;Vadcp;MagnitudeXY;Hadcp\n'
    writeAscii2D(profile_averaged, '{x};{y};{vx};{vy};{vmag};{depth}', args.outADCP, header=header)

    if args.inTELEMAC:
        with open(args.outT2DCSV, 'w', newline='') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=';')
            HEADER = ['folder', 'time_id', 'time', 'point_x', 'point_y', 'distance', 'value']
            csvwriter.writerow(HEADER)
            for slf_path in args.inTELEMAC:
                folder = os.path.basename(os.path.split(slf_path)[0])
                with Serafin.Read(slf_path, 'fr') as resin:
                    resin.read_header()
                    logger.info(resin.header.summary())
                    resin.get_time()
                    output_header = resin.header.copy()
                    if args.shift:
                        output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])
                    mesh = MeshInterpolator(output_header, True)
                    lines = []
                    for poly in Shapefile.get_lines(args.outADCP_GPS, shape_type=3):
                        lines.append(poly)
                    nb_nonempty, indices_nonempty, line_interpolators, line_interpolators_internal = \
                        mesh.get_line_interpolators(lines)
                    res = mesh.interpolate_along_lines(resin, 'M', list(range(len(resin.time))),
                                                       indices_nonempty, line_interpolators, '{:.6e}')
                    csvwriter.writerows([[folder] + x[2] for x in res])

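# Note on the reprojection above: Proj('+init=EPSG:xxxx') combined with pyproj.transform()
# is the legacy pyproj 1.x style and is deprecated in pyproj >= 2. A sketch of the same
# conversion with the current Transformer API (the EPSG codes 4326 -> 2154 are only an
# example, not necessarily what ADCP_comp is used with):
def _reprojection_sketch(x_list, y_list):
    from pyproj import Transformer
    transformer = Transformer.from_crs("EPSG:4326", "EPSG:2154", always_xy=True)
    return transformer.transform(x_list, y_list)
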
def slf_flux2d(args):
    if len(args.scalars) > 2:
        logger.critical('Only two scalars can be integrated!')
        sys.exit(2)

    # Read set of lines from input file
    polylines = []
    if args.in_sections.endswith('.i2s'):
        with BlueKenue.Read(args.in_sections) as f:
            f.read_header()
            for polyline in f.get_open_polylines():
                polylines.append(polyline)
    elif args.in_sections.endswith('.shp'):
        try:
            for polyline in Shapefile.get_open_polylines(args.in_sections):
                polylines.append(polyline)
        except ShapefileException as e:
            logger.critical(e)
            sys.exit(3)
    else:
        logger.critical('File "%s" is not an i2s or shp file.' % args.in_sections)
        sys.exit(2)

    if not polylines:
        logger.critical('The file does not contain any open polyline.')
        sys.exit(1)
    logger.debug('The file contains {} open polyline{}.'.format(len(polylines), 's' if len(polylines) > 1 else ''))

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Determine flux computation properties
        var_IDs = args.vectors + args.scalars
        variables_missing = [var_ID for var_ID in var_IDs if var_ID not in resin.header.var_IDs]
        if variables_missing:
            if len(variables_missing) > 1:
                logger.critical('Variables {} are not present in the Serafin file'.format(variables_missing))
            else:
                logger.critical('Variable {} is not present in the Serafin file'.format(variables_missing[0]))
            logger.critical('Also check the `--lang` argument for variable detection.')
            sys.exit(1)

        if var_IDs not in PossibleFluxComputation.common_fluxes():
            logger.warn('This flux computation is not a common one. Check what you are doing (or the language).')

        flux_type = PossibleFluxComputation.get_flux_type(var_IDs)

        section_names = ['Section %i' % (i + 1) for i in range(len(polylines))]
        calculator = FluxCalculator(flux_type, var_IDs, resin, section_names, polylines, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_intersections()
        result = []

        for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
            i_result = [str(time)]
            values = []

            for var_ID in calculator.var_IDs:
                values.append(resin.read_var_in_frame(time_index, var_ID))

            for j in range(len(polylines)):
                intersections = calculator.intersections[j]
                flux = calculator.flux_in_frame(intersections, values)
                i_result.append(settings.FMT_FLOAT.format(flux))

            result.append(i_result)

    # Write CSV
    mode = 'w' if args.force else 'x'
    with open(args.out_csv, mode) as out_csv:
        calculator.write_csv(result, out_csv, args.sep)

def slf_max_over_files(args):
    if args.vars is None:
        with Serafin.Read(args.in_slfs[0], args.lang) as resin:
            resin.read_header()
            var_IDs = resin.header.var_IDs
    else:
        var_IDs = args.vars

    if args.operation == 'max':
        fun = np.maximum
    elif args.operation == 'min':
        fun = np.minimum
    else:
        raise NotImplementedError

    # Read polygons
    if args.in_polygons is not None:
        if not args.in_polygons.endswith('.shp'):
            logger.critical('File "%s" is not a shp file.' % args.in_polygons)
            sys.exit(3)
        polygons = []
        try:
            for polygon in Shapefile.get_polygons(args.in_polygons):
                polygons.append(polygon)
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)

        if not polygons:
            logger.error('The file does not contain any polygon.')
            sys.exit(1)
        logger.info('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))
    else:
        polygons = None

    output_header = None
    out_values = None  # min or max values
    mask_nodes = None
    for i, in_slf in enumerate(args.in_slfs):
        with Serafin.Read(in_slf, args.lang) as resin:
            resin.read_header()
            logger.info(resin.header.summary())
            if not resin.header.is_2d:
                logger.critical('The file has to be a 2D Serafin!')
                sys.exit(3)
            resin.get_time()

            for var_ID in var_IDs:
                if var_ID not in resin.header.var_IDs:
                    logger.critical('The variable %s is missing in %s' % (var_ID, in_slf))
                    sys.exit(3)

            if i == 0:
                output_header = resin.header.copy()
                output_header.empty_variables()
                for var_ID in var_IDs:
                    output_header.add_variable_from_ID(var_ID)
                out_values = np.empty((output_header.nb_var, output_header.nb_nodes),
                                      dtype=output_header.np_float_type)

                if polygons is not None:
                    mask_nodes = np.zeros(output_header.nb_nodes, dtype=bool)
                    for idx_node, (x, y) in enumerate(zip(output_header.x, output_header.y)):
                        point = Point(x, y)
                        for polygon in polygons:
                            if polygon.contains(point):
                                mask_nodes[idx_node] = True
                                break
                    logger.info('Number of nodes inside polygon(s): %i (out of %i)'
                                % (mask_nodes.sum(), output_header.nb_nodes))
                else:
                    mask_nodes = np.ones(output_header.nb_nodes, dtype=bool)
            else:
                if not resin.header.same_2d_mesh(output_header):
                    logger.critical('The mesh of %s is different from the first one' % in_slf)
                    sys.exit(1)

            for time_index, time in enumerate(resin.time):
                for j, var_ID in enumerate(var_IDs):
                    values = resin.read_var_in_frame(time_index, var_ID)
                    if time_index == 0 and i == 0:
                        out_values[j, :] = values
                    else:
                        out_values[j, mask_nodes] = fun(out_values[j, mask_nodes], values[mask_nodes])

    with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
        resout.write_header(output_header)
        resout.write_entire_frame(output_header, 0.0, out_values)

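# The node mask in slf_max_over_files relies on one point-in-polygon test per mesh node.
# Self-contained illustration of that test with shapely primitives (the square and the two
# test points are arbitrary; the tool's polygons come from the Shapefile reader above):
def _point_in_polygon_sketch():
    from shapely.geometry import Point, Polygon
    square = Polygon([(0.0, 0.0), (2.0, 0.0), (2.0, 2.0), (0.0, 2.0)])
    return square.contains(Point(1.0, 1.0)), square.contains(Point(3.0, 1.0))  # (True, False)
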
def slf_volume(args):
    # Read set of lines from input file
    polygons = []
    if args.in_polygons.endswith('.i2s'):
        with BlueKenue.Read(args.in_polygons) as f:
            f.read_header()
            for poly in f.get_polygons():
                polygons.append(poly)
    elif args.in_polygons.endswith('.shp'):
        try:
            for polygon in Shapefile.get_polygons(args.in_polygons):
                polygons.append(polygon)
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)
    else:
        logger.error('File "%s" is not an i2s or shp file.' % args.in_polygons)
        sys.exit(2)

    if not polygons:
        logger.error('The file does not contain any polygon.')
        sys.exit(1)
    logger.debug('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))
    names = ['Polygon %d' % (i + 1) for i in range(len(polygons))]

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.error('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Check variable consistency
        if args.upper_var not in resin.header.var_IDs:
            logger.error('Upper variable "%s" is not in the Serafin file' % args.upper_var)
            sys.exit(1)
        upper_var = args.upper_var
        lower_var = args.lower_var
        if args.lower_var is not None:
            if args.lower_var == 'init':
                lower_var = VolumeCalculator.INIT_VALUE
            else:
                if lower_var not in resin.header.var_IDs:
                    logger.error('Lower variable "%s" is not in the Serafin file' % lower_var)
                    sys.exit(1)

        if args.detailed:
            volume_type = VolumeCalculator.POSITIVE
        else:
            volume_type = VolumeCalculator.NET
        calculator = VolumeCalculator(volume_type, upper_var, lower_var, resin, names, polygons, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_weights(tqdm)

        result = []
        for time_index in tqdm(calculator.time_indices, unit='frame'):
            i_result = [str(resin.time[time_index])]
            values = calculator.read_values_in_frame(time_index)

            for j in range(len(calculator.polygons)):
                weight = calculator.weights[j]
                volume = calculator.volume_in_frame_in_polygon(weight, values, calculator.polygons[j])
                if calculator.volume_type == VolumeCalculator.POSITIVE:
                    for v in volume:
                        i_result.append(settings.FMT_FLOAT.format(v))
                else:
                    i_result.append(settings.FMT_FLOAT.format(volume))
            result.append(i_result)

    # Write CSV
    mode = 'w' if args.force else 'x'
    with open(args.out_csv, mode) as out_csv:
        calculator.write_csv(result, out_csv, args.sep)

def slf_3d_to_2d(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if resin.header.is_2d:
            logger.critical('The input file is not 3D.')
            sys.exit(1)
        if 'Z' not in resin.header.var_IDs:
            logger.critical('The elevation variable Z is not found in the Serafin file.')
            sys.exit(1)
        if args.layer is not None:
            upper_plane = resin.header.nb_planes
            if args.layer < 1 or args.layer > upper_plane:
                logger.critical('Layer has to be in [1, %i]' % upper_plane)
                sys.exit(1)

        output_header = resin.header.copy_as_2d()

        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')

        if args.aggregation is not None:
            if args.aggregation == 'max':
                operation_type = operations.MAX
            elif args.aggregation == 'min':
                operation_type = operations.MIN
            else:  # args.aggregation == 'mean'
                operation_type = operations.MEAN
            selected_vars = [var for var in output_header.iter_on_all_variables()]
            vertical_calculator = operations.VerticalMaxMinMeanCalculator(operation_type, resin, output_header,
                                                                          selected_vars, args.vars)
            output_header.set_variables(vertical_calculator.get_variables())  # sort variables

        # Add some elevation variables
        for var_ID in args.vars:
            output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            vars_2d = np.empty((output_header.nb_var, output_header.nb_nodes_2d),
                               dtype=output_header.np_float_type)
            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                if args.aggregation is not None:
                    vars_2d = vertical_calculator.max_min_mean_in_frame(time_index)
                else:
                    for i, var in enumerate(output_header.var_IDs):
                        vars_2d[i, :] = resin.read_var_in_frame_as_3d(time_index, var)[args.layer - 1, :]
                resout.write_entire_frame(output_header, time, vars_2d)