def run(self):
    """Load 2D open polylines from the input file into ``self.data``.

    Reads either a BlueKenue .i2s file or a shapefile, reports failure
    through ``self.fail`` (access denied, shapefile error, empty result)
    and success through ``self.success``.
    """
    if self.state == Node.SUCCESS:
        return

    # Probe the file first so a permission problem is reported cleanly.
    try:
        with open(self.filename):
            pass
    except PermissionError:
        self.fail('Access denied.')
        return

    self.data = PolylineData()
    if self.filename.endswith('.i2s'):
        # BlueKenue format: single implicit attribute named 'Value'
        with BlueKenue.Read(self.filename) as i2s_file:
            i2s_file.read_header()
            for line in i2s_file.get_open_polylines():
                self.data.add_line(line)
        self.data.set_fields(['Value'])
    else:
        try:
            for line in Shapefile.get_open_polylines(self.filename):
                self.data.add_line(line)
        except ShapefileException as e:
            self.fail(e)
            return
        self.data.set_fields(Shapefile.get_all_fields(self.filename))

    if self.data.is_empty():
        self.fail('the file does not contain any 2D open polyline.')
        return

    nb_lines = len(self.data)
    self.success('The file contains {} open line{}.'.format(
        nb_lines, 's' if nb_lines > 1 else ''))
def get_lines_from_file(filename, interp_coord='LINEAR'):
    """
    Returns a list of ConstraintLine from an input file (.i2s or .shp)

    @param filename <str>: path to input file (None returns an empty list)
    @param interp_coord <str>: interpolation method passed to ConstraintLine
    @return <[ConstraintLine]>: one ConstraintLine per open polyline, indexed
        in file order
    TODO 1: Value is ignored in i2s file format
    """
    lines = []
    if filename is not None:
        if filename.endswith('.i2s'):
            with bk.Read(filename) as in_i2s:
                in_i2s.read_header()
                for i, line in enumerate(in_i2s.get_open_polylines()):
                    lines.append(
                        ConstraintLine(i, list(line.polyline().coords), interp_coord))
        elif filename.endswith('.shp'):
            if shp.get_shape_type(filename) not in (shapefile.POLYLINE,
                                                    shapefile.POLYLINEZ,
                                                    shapefile.POLYLINEM):
                # FIX: message now matches the accepted types above (and the
                # wording used by get_hydraulic_axis): plain POLYLINE is allowed
                raise TatooineException(
                    "The type of file %s is not POLYLINE[ZM]" % filename)
            for i, line in enumerate(shp.get_open_polylines(filename)):
                lines.append(
                    ConstraintLine(i, list(line.polyline().coords), interp_coord))
        else:
            raise NotImplementedError(
                "Only shp and i2s formats are supported for constraint lines")
    return lines
def get_hydraulic_axis(infile_axis):
    """
    @brief: Extract a unique line from i2s input file
    @param infile_axis <str>: path to file
    @return <shapely.geometry.LineString>: polyline representing the hydraulic axis
    """
    # Collect every open polyline found in the file, whatever the format
    if infile_axis.endswith('.i2s'):
        with bk.Read(infile_axis) as in_i2s:
            in_i2s.read_header()
            axis_candidates = list(in_i2s.get_open_polylines())
    elif infile_axis.endswith('.shp'):
        accepted_types = (shapefile.POLYLINE, shapefile.POLYLINEZ, shapefile.POLYLINEM)
        if shp.get_shape_type(infile_axis) not in accepted_types:
            raise TatooineException("The type of file %s is not POLYLINE[ZM]" % infile_axis)
        axis_candidates = list(shp.get_open_polylines(infile_axis))
    else:
        raise NotImplementedError(
            "Only shp and i2s formats are supported for hydraulic axis")

    # The hydraulic axis must be defined by exactly one polyline
    nb_lines = len(axis_candidates)
    if nb_lines != 1:
        raise TatooineException(
            "The file '{}' contains {} polylines instead of a unique line to define "
            "the hydraulic axis".format(infile_axis, nb_lines))
    return axis_candidates[0].polyline()
def get_zones_from_i3s_file(shp_name, threshold, operator_str):
    """Build interpolation Zones from consecutive open polylines of a shapefile.

    @param shp_name <str>: path to the input shapefile
    @param threshold <float|None>: if set, Z values at or below this threshold
        are replaced by linear interpolation (along the line's curvilinear
        abscissa) between the surrounding above-threshold points
    @param operator_str <str>: operator passed to each Zone
    @return <[Zone]>: one Zone per pair of consecutive polylines

    NOTE(review): this function also reads the module-level `args`
    (attr_to_shift_z) — kept as-is to preserve the interface, but it makes the
    function CLI-dependent.
    """
    polylines = []
    index_attr = None
    if args.attr_to_shift_z is not None:
        # Resolve the numeric attribute used to shift Z, failing early if absent
        attributes = shp.get_numeric_attribute_names(shp_name)
        try:
            index_attr = [attr for _, attr in attributes].index(args.attr_to_shift_z)
        except ValueError:
            logger.critical('Attribute "%s" is not found.' % args.attr_to_shift_z)
            sys.exit(1)

    for polyline in shp.get_open_polylines(shp_name):
        if not polyline.polyline().is_valid:
            sys.exit("ERROR: polyline is not valid (probably because it intersects itself)!")

        # Shift z (if requested)
        if args.attr_to_shift_z is not None:
            dz = polyline.attributes()[index_attr]
            logger.debug('Shifting Z by %s' % dz)  # FIX: was a leftover debug print()
            polyline = polyline.apply_transformations(
                [Transformation(0.0, 1.0, 1.0, 0.0, 0.0, dz)])

        # Linear interpolation along the line for values below the threshold
        if threshold is not None:
            np_coord = np.array(polyline.coords())
            # Curvilinear abscissa (cumulative 2D distance between vertices)
            Xt = np.sqrt(np.power(np.ediff1d(np_coord[:, 0], to_begin=0.), 2) +
                         np.power(np.ediff1d(np_coord[:, 1], to_begin=0.), 2))
            Xt = Xt.cumsum()
            # FIX: use the `threshold` parameter (the original tested the
            # parameter for None but then read the global `args.threshold`)
            ref_rows = np_coord[:, 2] > threshold
            np_coord[:, 2] = np.interp(Xt, Xt[ref_rows], np_coord[ref_rows, 2])
            polyline = geo.LineString(np_coord)

        polylines.append(polyline)

    # One Zone per pair of consecutive polylines
    zones = []
    for prev_line, next_line in zip(polylines[:-1], polylines[1:]):
        zones.append(Zone(prev_line, next_line, operator_str))
    return zones
def from_file(filename, label, field_id=None, project_straight_line=False):
    """Build a CrossSectionSequence from an .i3s or .shp file.

    Supported inputs:
      * .i3s: each open polyline becomes one CrossSection (Z taken from the
        third coordinate).
      * .shp POLYLINEZ/POLYLINEM: same, with the section id read from the
        attribute `field_id` (or the line index when `field_id` is None).
      * .shp POINT/POINTZ: points sharing the same id attribute are grouped
        into one CrossSection; Z layers come from the point Z (POINTZ only)
        plus every numeric attribute whose name starts with 'Z'.

    `label` is forwarded to every CrossSection; when `project_straight_line`
    is True each section is projected on a straight line at the end.
    Raises TatooineException for unsupported shapefile types and
    NotImplementedError for unsupported extensions.
    """
    section_seq = CrossSectionSequence()

    if filename.endswith('.i3s'):
        with bk.Read(filename) as in_i3s:
            in_i3s.read_header()
            for i, line in enumerate(in_i3s.get_open_polylines()):
                line_id = i if field_id is None else line.attributes()[
                    0]  # Use `Value` if field is not None
                # Z is extracted from the third coordinate before flattening to 2D
                z_array = np.array([(coord[2], ) for coord in line.polyline().coords],
                                   dtype=float_vars('Z'))
                line = line.to_2d()
                section = CrossSection(line_id, list(line.polyline().coords), label)
                section.coord.values = z_array
                section_seq.add_section(section)
    elif filename.endswith('.shp'):
        shp_type = shp.get_shape_type(filename)
        if shp_type in (shapefile.POLYLINEZ, shapefile.POLYLINEM):
            field_id_index = get_field_index(filename, field_id)
            for i, line in enumerate(shp.get_open_polylines(filename)):
                line_id = i if field_id is None else line.attributes(
                )[field_id_index]
                # Same pattern as the .i3s branch: keep Z aside, then drop to 2D
                z_array = np.array([(coord[2], ) for coord in line.polyline().coords],
                                   dtype=float_vars(['Z']))
                line = line.to_2d()
                section = CrossSection(line_id, list(line.polyline().coords), label)
                section.coord.values = z_array
                section_seq.add_section(section)
        elif shp_type in (shapefile.POINT, shapefile.POINTZ):
            field_id_index = get_field_index(filename, field_id)
            # Z layers: the geometric Z first (POINTZ only), then every numeric
            # attribute whose name starts with 'Z'
            field_indexes, field_names = [], []
            if shp_type == shapefile.POINTZ:
                field_names.append('Z')
            for index, name in shp.get_numeric_attribute_names(filename):
                if name.startswith('Z'):
                    field_indexes.append(index)
                    field_names.append(name)
            # Accumulate points until the id attribute changes, then flush a
            # CrossSection (assumes points of one section are consecutive in
            # the file — TODO confirm)
            coords, z_layers = [], []
            last_point_id = None
            for i, (point, attributes) in enumerate(
                    shp.get_points(filename, with_z=shp_type == shapefile.POINTZ)):
                point_id = attributes[
                    field_id_index]  # FIXME: should raise exception if field_id_index is None!
                if i > 0 and point_id != last_point_id:
                    z_array = np.array(z_layers, dtype=float_vars(field_names))
                    section = CrossSection(last_point_id, coords, label)
                    section.coord.values = z_array
                    section_seq.add_section(section)
                    coords, z_layers = [], []
                coords.append(point[:2])
                if shp_type == shapefile.POINTZ:
                    z_layers.append((point[2], ) + tuple(attributes[index]
                                                         for index in field_indexes))
                else:
                    z_layers.append(
                        tuple(attributes[index] for index in field_indexes))
                last_point_id = point_id
            # Flush the last accumulated section
            z_array = np.array(z_layers, dtype=float_vars(field_names))
            section = CrossSection(last_point_id, coords, label)
            section.coord.values = z_array
            section_seq.add_section(section)
        else:
            raise TatooineException(
                "The type of file %s is not POINT[Z] or POLYLINEZ[M]" % filename)
    else:
        raise NotImplementedError(
            "Only shp and i3s formats are supported for cross-sections")

    if project_straight_line:
        for section in section_seq:
            section.project_straight_line()
    return section_seq
def slf_flux2d(args):
    """Compute fluxes through a set of sections of a 2D Serafin file.

    Reads the section polylines (.i2s or .shp given by ``args.in_sections``),
    computes the flux defined by ``args.vectors + args.scalars`` through each
    section for every frame of ``args.in_slf``, and writes one CSV row per
    frame to ``args.out_csv``. Exits with a non-zero status on any input error.
    """
    if len(args.scalars) > 2:
        logger.critical('Only two scalars can be integrated!')
        sys.exit(2)

    # Read set of lines from input file
    polylines = []
    if args.in_sections.endswith('.i2s'):
        with BlueKenue.Read(args.in_sections) as f:
            f.read_header()
            for polyline in f.get_open_polylines():
                polylines.append(polyline)
    elif args.in_sections.endswith('.shp'):
        try:
            for polyline in Shapefile.get_open_polylines(args.in_sections):
                polylines.append(polyline)
        except ShapefileException as e:
            logger.critical(e)
            sys.exit(3)
    else:
        logger.critical('File "%s" is not a i2s or shp file.' % args.in_sections)
        sys.exit(2)

    if not polylines:
        logger.critical('The file does not contain any open polyline.')
        sys.exit(1)
    logger.debug('The file contains {} open polyline{}.'.format(
        len(polylines), 's' if len(polylines) > 1 else ''))

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()
        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Determine flux computations properties
        var_IDs = args.vectors + args.scalars
        variables_missing = [
            var_ID for var_ID in var_IDs if var_ID not in resin.header.var_IDs
        ]
        if variables_missing:
            if len(variables_missing) > 1:
                logger.critical(
                    'Variables {} are not present in the Serafin file'.format(
                        variables_missing))
            else:
                logger.critical(
                    'Variable {} is not present in the Serafin file'.format(
                        variables_missing[0]))
            logger.critical(
                'Check also `--lang` argument for variable detection.')
            sys.exit(1)

        if var_IDs not in PossibleFluxComputation.common_fluxes():
            # FIX: logger.warn is a deprecated alias of logger.warning
            logger.warning(
                'Flux computations is not common. Check what you are doing (or the language).'
            )

        flux_type = PossibleFluxComputation.get_flux_type(var_IDs)
        section_names = ['Section %i' % (i + 1) for i in range(len(polylines))]
        calculator = FluxCalculator(flux_type, var_IDs, resin, section_names,
                                    polylines, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_intersections()

        # One CSV row per frame: [time, flux through section 1, 2, ...]
        result = []
        for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
            i_result = [str(time)]
            values = []
            for var_ID in calculator.var_IDs:
                values.append(resin.read_var_in_frame(time_index, var_ID))
            for j in range(len(polylines)):
                intersections = calculator.intersections[j]
                flux = calculator.flux_in_frame(intersections, values)
                i_result.append(settings.FMT_FLOAT.format(flux))
            result.append(i_result)

    # Write CSV ('x' mode refuses to overwrite unless --force was given)
    mode = 'w' if args.force else 'x'
    with open(args.out_csv, mode) as out_csv:
        calculator.write_csv(result, out_csv, args.sep)
geometry.Polyline([(0, 0, 0), (10, 20, 30), (10, 40, 30), (0, 0, 0)], attributes=[0], m_array=[[0], [100], [200], [300]]) ] Shapefile.write_shp_lines('from_scratch/polygon.shp', shapefile.POLYGON, lines, 'PolyID') Shapefile.write_shp_lines('from_scratch/polygonz.shp', shapefile.POLYGONZ, lines, 'PolyID') Shapefile.write_shp_lines('from_scratch/polygonm.shp', shapefile.POLYGONM, lines, 'PolyID') # Open files just written for file_path in ['polyline.shp', 'polylinez.shp', 'polylinem.shp']: print( list( Shapefile.get_open_polylines( os.path.join('from_scratch', file_path)))) for file_path in ['polygon.shp', 'polygonz.shp', 'polygonm.shp']: print(list(Shapefile.get_polygons(os.path.join('from_scratch', file_path)))) # Convert from POINTZM shapefile conv = conversion.ShpPointConverter( '../data/shp/Point/POINTZM_dalle_lidar_simple.shp') conv.read() conv.write('xyz', 'from_shp_points/POINTZM_to_xyz.xyz', ('Z', '0')) conv.write('csv', 'from_shp_points/POINTZM_to_csv.csv', ('Z', 'M')) conv.write('shp Point', 'from_shp_points/POINTZM_to_POINT.shp', ('0', '0')) conv.write('shp PointZ', 'from_shp_points/POINTZM_to_POINTZM.shp', ('Z', 'M')) conv.write('shp PointM', 'from_shp_points/POINTZM_to_POINTM.csv', ('0', '0')) # Convert from POINTZM shapefile