def get_zones_from_i3s_file(shp_name, threshold, operator_str):
    """Build interpolation zones from consecutive open polylines of a shapefile.

    Args:
        shp_name: path to the input shapefile containing open polylines.
        threshold: z values below this threshold are replaced by linear
            interpolation along the line (None to disable).
        operator_str: operator identifier forwarded to each `Zone`.

    Returns:
        List of `Zone` objects built from each pair of consecutive polylines.

    Exits the process on missing attribute or invalid (self-intersecting) polyline.
    NOTE(review): also reads the module-level `args.attr_to_shift_z` — confirm this
    global coupling is intended.
    """
    polylines = []
    attributes = shp.get_numeric_attribute_names(shp_name)
    if args.attr_to_shift_z is not None:
        try:
            index_attr = [attr for _, attr in attributes].index(args.attr_to_shift_z)
        except ValueError:
            logger.critical('Attribute "%s" is not found.' % args.attr_to_shift_z)
            sys.exit(1)

    for polyline in shp.get_open_polylines(shp_name):
        if not polyline.polyline().is_valid:
            sys.exit("ERROR: polyline is not valid (probably because it intersects itself)!")

        # Shift z (if requested)
        if args.attr_to_shift_z is not None:
            dz = polyline.attributes()[index_attr]
            logger.debug('Shifting polyline z by %s' % dz)  # FIX: was a leftover debug print(dz)
            polyline = polyline.apply_transformations([Transformation(0.0, 1.0, 1.0, 0.0, 0.0, dz)])

        # Linear interpolation along the line for values below the threshold
        if threshold is not None:
            np_coord = np.array(polyline.coords())
            # Curvilinear abscissa along the polyline
            Xt = np.sqrt(np.power(np.ediff1d(np_coord[:, 0], to_begin=0.), 2) +
                         np.power(np.ediff1d(np_coord[:, 1], to_begin=0.), 2))
            Xt = Xt.cumsum()
            # FIX: compare against the `threshold` parameter, not `args.threshold`
            ref_rows = np_coord[:, 2] > threshold
            np_coord[:, 2] = np.interp(Xt, Xt[ref_rows], np_coord[ref_rows, 2])
            polyline = geo.LineString(np_coord)

        polylines.append(polyline)

    # One zone per pair of consecutive polylines
    zones = []
    for prev_line, next_line in zip(polylines[:-1], polylines[1:]):
        zones.append(Zone(prev_line, next_line, operator_str))
    return zones
def slf_int2d(args):
    """Interpolate 2D Serafin variables at a set of points and write a CSV time series.

    Reads the points from a shapefile, builds barycentric interpolators on the
    mesh, then writes one CSV row per frame (wide format) or one row per
    frame/point/variable (long format, `--long`). Exits with a non-zero status
    on empty point set or non-2D input.
    """
    # Read set of points file
    fields, indices = Shapefile.get_attribute_names(args.in_points)
    points = []
    attributes = []
    for point, attribute in Shapefile.get_points(args.in_points, indices):
        points.append(point)
        attributes.append(attribute)

    if not points:
        logger.critical('The Shapefile does not contain any point.')
        sys.exit(1)

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        resin.get_time()

        output_header = resin.header.copy()
        # Barycentric interpolators: one (triangle, weights) pair per point,
        # None for points outside the mesh
        mesh = MeshInterpolator(output_header, True)
        is_inside, point_interpolators = mesh.get_point_interpolators(points)
        nb_inside = sum(map(int, is_inside))
        if nb_inside == 0:
            logger.critical('No point inside the mesh.')
            sys.exit(3)
        logger.debug('The file contains {} point{}. {} point{} inside the mesh'.format(
            len(points), 's' if len(points) > 1 else '',
            nb_inside, 's are' if nb_inside > 1 else ' is'))

        # Default to every variable of the input file
        var_IDs = output_header.var_IDs if args.vars is None else args.vars

        mode = 'w' if args.force else 'x'  # 'x' fails if the output already exists
        with open(args.out_csv, mode, newline='') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=args.sep)

            header = ['time_id', 'time']
            if args.long:
                header = header + ['point_id', 'point_x', 'point_y', 'variable', 'value']
            else:
                # NOTE(review): wide-format header columns are nested point-outer /
                # variable-inner, while the data loop below fills values
                # variable-outer / point-inner — confirm the column ordering is
                # consistent when several variables AND several points are requested.
                for pt_id, (x, y) in enumerate(points):
                    for var in var_IDs:
                        header.append('Point %d %s (%s|%s)' % (pt_id + 1, var,
                                                               settings.FMT_COORD.format(x),
                                                               settings.FMT_COORD.format(y)))
            csvwriter.writerow(header)

            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                values = [time_index, time]

                for var_ID in var_IDs:
                    var = resin.read_var_in_frame(time_index, var_ID)
                    for pt_id, (point, point_interpolator) in enumerate(zip(points, point_interpolators)):
                        if args.long:
                            # Row prefix shared by every variable of this point
                            values_long = values + [str(pt_id + 1)] + [settings.FMT_COORD.format(x) for x in point]

                        if point_interpolator is None:
                            # Point outside the mesh: emit NaN marker
                            if args.long:
                                csvwriter.writerow(values_long + [var_ID, settings.NAN_STR])
                            else:
                                values.append(settings.NAN_STR)
                        else:
                            # Barycentric interpolation over the triangle (i, j, k)
                            (i, j, k), interpolator = point_interpolator
                            int_value = settings.FMT_FLOAT.format(interpolator.dot(var[[i, j, k]]))
                            if args.long:
                                csvwriter.writerow(values_long + [var_ID, int_value])
                            else:
                                values.append(int_value)

                if not args.long:
                    # Wide format: one row per frame
                    csvwriter.writerow(values)
# NOTE(review): this chunk begins mid-call — the opening `parser.add_argument(`
# of the first argument below is outside the visible source.
              help='write CSV with long format (variables are also in rows) instead of wide format',
              action='store_true')
parser.add_argument('--vars', nargs='+',
                    help='variable(s) to extract (by default: every variables)',
                    default=None, metavar=('VA', 'VB'))
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()
    try:
        slf_int2d(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
    except ShapefileException as e:
        logger.critical(e)
        sys.exit(3)
    except FileNotFoundError as e:
        logger.critical('Input file %s not found.' % e.filename)
        sys.exit(3)
    except FileExistsError as e:
        # Raised by the 'x' open mode when the output CSV already exists
        logger.critical('Output file %s already exists. Remove it or add `--force` argument' % e.filename)
        sys.exit(3)
def slf_sedi_chain(args):
    """Compute bed evolution (deposition/erosion) from bottom shear stress TAU.

    Writes a Serafin file with variables B (bottom) and EV (evolution), updating
    the bottom between consecutive frames with a Krone/Partheniades-style law:
    deposition when mean TAU < Tcd, erosion when mean TAU > Tce.
    """
    # Check that float parameters are positive (especially ws!)
    for arg in ('Cmud', 'ws', 'C', 'M'):
        if getattr(args, arg) < 0:
            # FIX: was `% args`, which printed the whole namespace instead of the name
            logger.critical('The argument %s has to be positive' % arg)
            sys.exit(1)

    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs, ['TAU'],
                                                      is_2d=True, us_equation=us_equation)

        if resin.header.nb_frames < 1:
            logger.critical('The input file must have at least one frame!')
            sys.exit(1)

        output_header = resin.header.copy()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                # FIX: logger.warn is a deprecated alias of logger.warning
                logger.warning('Input file is already single precision! Argument `--to_single_precision` is ignored')

        # Output carries only bottom and evolution
        output_header.empty_variables()
        output_header.add_variable_from_ID('B')
        output_header.add_variable_from_ID('EV')

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            prev_time = None
            prev_tau = None
            initial_bottom = resin.read_var_in_frame(0, 'B')
            bottom = copy(initial_bottom)
            for time_index, time in enumerate(resin.time):
                tau = do_calculations_in_frame(necessary_equations, resin, time_index, ['TAU'],
                                               output_header.np_float_type, is_2d=True,
                                               us_equation=us_equation, ori_values={})[0]
                if prev_time is not None:
                    dt = time - prev_time
                    # Trapezoidal mean of TAU over the time step
                    mean_tau = (prev_tau + tau) / 2
                    if args.Tcd > 0:
                        # Deposition below the critical deposition stress Tcd
                        bottom += args.Cmud * args.ws * args.C * \
                            (1 - np.clip(mean_tau / args.Tcd, a_min=None, a_max=1.)) * dt
                    if args.Tce > 0:
                        # Erosion above the critical erosion stress Tce
                        bottom -= args.Cmud * args.M * \
                            (np.clip(mean_tau / args.Tce, a_min=1., a_max=None) - 1.) * dt

                evol_bottom = bottom - initial_bottom
                resout.write_entire_frame(output_header, time, np.vstack((bottom, evol_bottom)))
                prev_time = time
                prev_tau = tau
def slf_flux2d(args):
    """Compute fluxes of vector/scalar variables through open polyline sections.

    Sections come from an i2s or shp file; one CSV column is written per section,
    one row per frame. Exits on invalid inputs or missing variables.
    """
    if len(args.scalars) > 2:
        logger.critical('Only two scalars can be integrated!')
        sys.exit(2)

    # Read set of lines from input file
    polylines = []
    if args.in_sections.endswith('.i2s'):
        with BlueKenue.Read(args.in_sections) as f:
            f.read_header()
            for polyline in f.get_open_polylines():
                polylines.append(polyline)
    elif args.in_sections.endswith('.shp'):
        try:
            for polyline in Shapefile.get_open_polylines(args.in_sections):
                polylines.append(polyline)
        except ShapefileException as e:
            logger.critical(e)
            sys.exit(3)
    else:
        logger.critical('File "%s" is not a i2s or shp file.' % args.in_sections)
        sys.exit(2)

    if not polylines:
        logger.critical('The file does not contain any open polyline.')
        sys.exit(1)
    logger.debug('The file contains {} open polyline{}.'.format(len(polylines), 's' if len(polylines) > 1 else ''))

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Determine flux computations properties
        var_IDs = args.vectors + args.scalars
        variables_missing = [var_ID for var_ID in var_IDs if var_ID not in resin.header.var_IDs]
        if variables_missing:
            if len(variables_missing) > 1:
                logger.critical('Variables {} are not present in the Serafin file'.format(variables_missing))
            else:
                logger.critical('Variable {} is not present in the Serafin file'.format(variables_missing[0]))
            logger.critical('Check also `--lang` argument for variable detection.')
            sys.exit(1)
        if var_IDs not in PossibleFluxComputation.common_fluxes():
            # FIX: logger.warn is a deprecated alias of logger.warning
            logger.warning('Flux computations is not common. Check what you are doing (or the language).')

        flux_type = PossibleFluxComputation.get_flux_type(var_IDs)

        section_names = ['Section %i' % (i + 1) for i in range(len(polylines))]
        calculator = FluxCalculator(flux_type, var_IDs, resin, section_names, polylines, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_intersections()
        result = []
        for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
            i_result = [str(time)]
            values = []

            for var_ID in calculator.var_IDs:
                values.append(resin.read_var_in_frame(time_index, var_ID))

            for j in range(len(polylines)):
                intersections = calculator.intersections[j]
                flux = calculator.flux_in_frame(intersections, values)
                i_result.append(settings.FMT_FLOAT.format(flux))

            result.append(i_result)

        # Write CSV
        mode = 'w' if args.force else 'x'  # 'x' fails if the output already exists
        with open(args.out_csv, mode) as out_csv:
            calculator.write_csv(result, out_csv, args.sep)
def slf_max_over_files(args):
    """Compute the node-wise min or max of variables over several Serafin files.

    All input files must share the same 2D mesh. If polygons are given, only
    nodes inside them are updated after the first frame of the first file.
    Writes a single-frame Serafin file with the aggregated values.
    """
    if args.vars is None:
        # Default to the variables of the first input file
        with Serafin.Read(args.in_slfs[0], args.lang) as resin:
            resin.read_header()
            var_IDs = resin.header.var_IDs  # FIX: dropped redundant `if args.vars is None else args.vars`
    else:
        var_IDs = args.vars

    if args.operation == 'max':
        fun = np.maximum
    elif args.operation == 'min':
        fun = np.minimum
    else:
        raise NotImplementedError

    # Read polygons
    if args.in_polygons is not None:
        if not args.in_polygons.endswith('.shp'):
            logger.critical('File "%s" is not a shp file.' % args.in_polygons)
            sys.exit(3)
        polygons = []
        try:
            for polygon in Shapefile.get_polygons(args.in_polygons):
                polygons.append(polygon)
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)

        if not polygons:
            logger.error('The file does not contain any polygon.')
            sys.exit(1)
        logger.info('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))
    else:
        polygons = None

    output_header = None
    out_values = None  # min or max values
    mask_nodes = None
    for i, in_slf in enumerate(args.in_slfs):
        with Serafin.Read(in_slf, args.lang) as resin:
            resin.read_header()
            logger.info(resin.header.summary())
            if not resin.header.is_2d:
                logger.critical('The file has to be a 2D Serafin!')
                sys.exit(3)
            resin.get_time()

            for var_ID in var_IDs:
                if var_ID not in resin.header.var_IDs:
                    logger.critical('The variable %s is missing in %s' % (var_ID, in_slf))
                    sys.exit(3)

            if i == 0:
                # First file: build output header, buffers and node mask
                output_header = resin.header.copy()
                output_header.empty_variables()
                for var_ID in var_IDs:
                    output_header.add_variable_from_ID(var_ID)
                out_values = np.empty((output_header.nb_var, output_header.nb_nodes),
                                      dtype=output_header.np_float_type)
                if polygons is not None:
                    mask_nodes = np.zeros(output_header.nb_nodes, dtype=bool)
                    for idx_node, (x, y) in enumerate(zip(output_header.x, output_header.y)):
                        point = Point(x, y)
                        for polygon in polygons:
                            if polygon.contains(point):
                                mask_nodes[idx_node] = True
                                break
                    logger.info('Number of nodes inside polygon(s): %i (over %i)'
                                % (mask_nodes.sum(), output_header.nb_nodes))
                else:
                    mask_nodes = np.ones(output_header.nb_nodes, dtype=bool)
            else:
                if not resin.header.same_2d_mesh(output_header):
                    logger.critical('The mesh of %s is different from the first one' % in_slf)
                    sys.exit(1)

            for time_index, time in enumerate(resin.time):
                for j, var_ID in enumerate(var_IDs):
                    values = resin.read_var_in_frame(time_index, var_ID)
                    if time_index == 0 and i == 0:
                        # Seed with the very first frame (also outside the mask)
                        out_values[j, :] = values
                    else:
                        out_values[j, mask_nodes] = fun(out_values[j, mask_nodes], values[mask_nodes])

    with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
        resout.write_header(output_header)
        resout.write_entire_frame(output_header, 0.0, out_values)
def slf_bottom_friction(args):
    """Compute bottom shear stress TAU (plus W and US) and its volume per polygon.

    Optionally recomputes the friction coefficient W from zone polygons carrying
    a numeric attribute. Writes a Serafin file with W/US/TAU and a CSV with the
    TAU "volume" integrated over each polygon per frame.
    """
    # Check argument consistency: both zone arguments go together
    if args.in_strickler_zones is not None or args.in_strickler_attr is not None:
        if args.in_strickler_zones is None or args.in_strickler_attr is None:
            logger.critical('Both arguments `--in_strickler_zones` and `--in_strickler_attr` have to be defined.')
            sys.exit(2)

    # Read polygons to compute volume
    if not args.in_polygons.endswith('.shp'):
        logger.critical('File "%s" is not a shp file.' % args.in_polygons)
        sys.exit(3)

    polygons = []
    try:
        for polygon in Shapefile.get_polygons(args.in_polygons):
            polygons.append(polygon)
    except ShapefileException as e:
        logger.error(e)
        sys.exit(3)

    if not polygons:
        logger.error('The file does not contain any polygon.')
        sys.exit(1)
    logger.debug('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))
    names = ['Polygon %d' % (i + 1) for i in range(len(polygons))]

    varIDs = ['US', 'TAU']
    out_varIDs = ['W'] + varIDs
    pos_TAU = out_varIDs.index('TAU')

    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)
        in_varIDs = resin.header.var_IDs

        # Compute Strickler values if necessary
        ori_values = {}
        if args.in_strickler_zones is not None:
            if not args.in_strickler_zones.endswith('.shp'):
                logger.critical('File "%s" is not a shp file.' % args.in_strickler_zones)
                sys.exit(3)

            attributes = Shapefile.get_numeric_attribute_names(args.in_strickler_zones)
            try:
                index_attr = [attr for _, attr in attributes].index(args.in_strickler_attr)
            except ValueError:
                logger.critical('Attribute "%s" is not found.' % args.in_strickler_attr)
                sys.exit(1)

            strickler_zones = []
            try:
                for zone in Shapefile.get_polygons(args.in_strickler_zones):
                    strickler_zones.append(zone)
            except ShapefileException as e:
                logger.error(e)
                sys.exit(3)

            if not strickler_zones:
                logger.error('The file does not contain any friction zone.')
                sys.exit(1)

            logger.debug('Recomputing friction coefficient values from zones')
            # Default value for nodes not included in any zone
            friction_coeff = np.full(resin.header.nb_nodes_2d, 0.0)
            # FIX: single tqdm (the original wrapped both zip operands, giving two bars)
            for i, (x, y) in enumerate(tqdm(zip(resin.header.x, resin.header.y),
                                            total=resin.header.nb_nodes_2d)):
                point = Point(x, y)
                for zone in strickler_zones:
                    if zone.contains(point):
                        friction_coeff[i] = zone.attributes()[index_attr]
                        # FIX: was a bare `exit` expression (a no-op) — first matching zone wins
                        break
            in_varIDs.append('W')
            ori_values['W'] = friction_coeff
        else:
            # FIX: was `resin.header.varIDs` — attribute is `var_IDs` everywhere else
            if 'W' not in resin.header.var_IDs:
                logger.critical('The variable W is missing.')
                sys.exit(1)

        us_equation = None
        if args.friction_law:
            us_equation = get_US_equation(args.friction_law)

        resin.get_time()
        necessary_equations = get_necessary_equations(in_varIDs, out_varIDs,
                                                      is_2d=True, us_equation=us_equation)

        calculator = VolumeCalculator(VolumeCalculator.NET, 'TAU', None, resin, names, polygons, 1)
        calculator.construct_triangles(tqdm)
        calculator.construct_weights(tqdm)

        output_header = resin.header.copy()
        output_header.empty_variables()
        for var_ID in out_varIDs:
            output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, args.force) as resout:
            resout.write_header(output_header)

            mode = 'w' if args.force else 'x'  # 'x' fails if the output already exists
            with open(args.out_csv, mode, newline='') as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=args.sep)
                csvwriter.writerow(['time'] + names)

                for time_index, time in enumerate(tqdm(resin.time)):
                    # FIX: was `us_equation=strickler_equation` (undefined name)
                    values = do_calculations_in_frame(necessary_equations, resin, time_index, out_varIDs,
                                                      resin.header.np_float_type, is_2d=True,
                                                      us_equation=us_equation, ori_values=ori_values)
                    resout.write_entire_frame(output_header, time, values)

                    row = [time]
                    for j in range(len(calculator.polygons)):
                        weight = calculator.weights[j]
                        volume = calculator.volume_in_frame_in_polygon(weight, values[pos_TAU],
                                                                       calculator.polygons[j])
                        row.append(volume)
                    csvwriter.writerow(row)
def slf_3d_to_2d(args):
    """Convert a 3D Serafin file to 2D, by layer extraction or vertical aggregation.

    Either extracts a single plane (`--layer`, 1-based from the bottom) or
    aggregates over the vertical (`--aggregation` max/min/mean). Exits if the
    input is not 3D or lacks the elevation variable Z.
    """
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if resin.header.is_2d:
            logger.critical('The input file is not 3D.')
            sys.exit(1)
        if 'Z' not in resin.header.var_IDs:
            logger.critical('The elevation variable Z is not found in the Serafin file.')
            sys.exit(1)
        if args.layer is not None:
            upper_plane = resin.header.nb_planes
            if args.layer < 1 or args.layer > upper_plane:
                logger.critical('Layer has to be in [1, %i]' % upper_plane)
                sys.exit(1)

        output_header = resin.header.copy_as_2d()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                # FIX: logger.warn is a deprecated alias of logger.warning
                logger.warning('Input file is already single precision! Argument `--to_single_precision` is ignored')

        if args.aggregation is not None:
            if args.aggregation == 'max':
                operation_type = operations.MAX
            elif args.aggregation == 'min':
                operation_type = operations.MIN
            else:  # args.aggregation == 'mean'
                operation_type = operations.MEAN
            selected_vars = [var for var in output_header.iter_on_all_variables()]
            vertical_calculator = operations.VerticalMaxMinMeanCalculator(operation_type, resin, output_header,
                                                                          selected_vars, args.vars)
            output_header.set_variables(vertical_calculator.get_variables())  # sort variables

            # Add some elevation variables
            # NOTE(review): placed inside the aggregation branch (args.vars may be
            # None in layer mode) — confirm against the original indentation.
            for var_ID in args.vars:
                output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            vars_2d = np.empty((output_header.nb_var, output_header.nb_nodes_2d),
                               dtype=output_header.np_float_type)
            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                if args.aggregation is not None:
                    vars_2d = vertical_calculator.max_min_mean_in_frame(time_index)
                else:
                    # Extract the requested plane (1-based from the bottom)
                    for i, var in enumerate(output_header.var_IDs):
                        vars_2d[i, :] = resin.read_var_in_frame_as_3d(time_index, var)[args.layer - 1, :]
                resout.write_entire_frame(output_header, time, vars_2d)