def get_lines_from_file(filename, interp_coord='LINEAR'):
    """
    Returns a list of ConstraintLine from an input file
    TODO 1: Value is ignored in i2s file format
    """
    lines = []
    if filename is not None:
        if filename.endswith('.i2s'):
            with bk.Read(filename) as in_i2s:
                in_i2s.read_header()
                for i, line in enumerate(in_i2s.get_open_polylines()):
                    lines.append(
                        ConstraintLine(i, list(line.polyline().coords),
                                       interp_coord))

        elif filename.endswith('.shp'):
            if shp.get_shape_type(filename) not in (shapefile.POLYLINE,
                                                    shapefile.POLYLINEZ,
                                                    shapefile.POLYLINEM):
                raise TatooineException(
                    "The type of file %s is not POLYLINE[ZM]" % filename)
            for i, line in enumerate(shp.get_open_polylines(filename)):
                lines.append(
                    ConstraintLine(i, list(line.polyline().coords),
                                   interp_coord))

        else:
            raise NotImplementedError(
                "Only shp and i2s formats are supported for constraint lines"
            )

    return lines
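A minimal usage sketch for the function above, assuming it is imported from its original module; the file name is a placeholder:

constraint_lines = get_lines_from_file('constraint_lines.shp', interp_coord='LINEAR')
for constraint_line in constraint_lines:
    print(constraint_line)  # one ConstraintLine per open polyline found in the file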
Example 2
    def run(self):
        if self.state == Node.SUCCESS:
            return
        try:
            with open(self.filename):
                pass
        except PermissionError:
            self.fail('Access denied.')
            return
        self.data = PolylineData()
        is_i2s = self.filename[-4:] == '.i2s'
        if is_i2s:
            with BlueKenue.Read(self.filename) as f:
                f.read_header()
                for poly in f.get_open_polylines():
                    self.data.add_line(poly)
            self.data.set_fields(['Value'])
        else:
            try:
                for poly in Shapefile.get_open_polylines(self.filename):
                    self.data.add_line(poly)
            except ShapefileException as e:
                self.fail(e)
                return
            self.data.set_fields(Shapefile.get_all_fields(self.filename))

        if self.data.is_empty():
            self.fail('The file does not contain any 2D open polyline.')
            return

        self.success('The file contains {} open line{}.'.format(
            len(self.data), 's' if len(self.data) > 1 else ''))
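The run() method above relies on a Node base class that provides state, fail() and success(); a minimal stand-in sketch (attribute and method names are taken from the calls above, the bodies are assumptions):

class Node:
    SUCCESS, FAIL = 'SUCCESS', 'FAIL'

    def __init__(self, filename):
        self.state = None          # set to Node.SUCCESS / Node.FAIL by success() / fail()
        self.filename = filename   # path to the i2s or shp file to load
        self.data = None           # filled with a PolylineData by run()

    def fail(self, message):
        self.state = Node.FAIL
        print('FAIL:', message)

    def success(self, message):
        self.state = Node.SUCCESS
        print('SUCCESS:', message)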
Example 3
def get_hydraulic_axis(infile_axis):
    """
    @brief: Extract a unique line from i2s input file
    @param infile_axis <str>: path to file
    @return <shapely.geometry.LineString>: polyline representing the hydraulic axis
    """
    if infile_axis.endswith('.i2s'):
        with bk.Read(infile_axis) as in_i2s:
            in_i2s.read_header()
            lines = list(in_i2s.get_open_polylines())
    elif infile_axis.endswith('.shp'):
        if shp.get_shape_type(infile_axis) not in (shapefile.POLYLINE,
                                                   shapefile.POLYLINEZ,
                                                   shapefile.POLYLINEM):
            raise TatooineException("The type of file %s is not POLYLINE[ZM]" %
                                    infile_axis)
        lines = list(shp.get_open_polylines(infile_axis))
    else:
        raise NotImplementedError(
            "Only shp and i2s formats are supported for hydraulic axis")
    nb_lines = len(lines)
    if nb_lines != 1:
        raise TatooineException(
            "The file '{}' contains {} polylines instead of a unique line to define "
            "the hydraulic axis".format(infile_axis, nb_lines))
    return lines[0].polyline()
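A hypothetical call of get_hydraulic_axis(); the file name is a placeholder and the returned object is a shapely LineString, as stated in the docstring:

axis = get_hydraulic_axis('hydraulic_axis.i2s')
print(axis.length)                             # total length of the axis
print(axis.interpolate(0.5, normalized=True))  # point at mid-length of the axis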
Example 4
    def _build_polygon(self):
        outline_pts = list(self.polyline_1.coords) + list(reversed(self.polyline_2.coords))
        self.polygon = geo.Polygon(outline_pts)
        if not self.polygon.is_simple:  # FIXME: it should be "if not self.polygon.is_valid"
            print("Line distance = %s" % self.polyline_1.distance(self.polyline_2))
            print("Start distance = %s" % self.polyline_1.interpolate(0, normalized=True).distance(
                self.polyline_2.interpolate(0, normalized=True)))
            print("End distance = %s" % self.polyline_1.interpolate(1, normalized=True).distance(
                self.polyline_2.interpolate(1, normalized=True)))
            with bk.Write('debug.i3s') as out_i3s:
                out_i3s.write_header()
                out_i3s.write_lines([Polyline(self.polygon.exterior.coords)], [0.0])
            sys.exit("ERROR: Zone is invalid. Check polyline direction consistency!")
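The FIXME above hinges on shapely's distinction between is_simple and is_valid; a self-contained sketch showing what is_valid reports on a self-intersecting ("bow-tie") outline, which is what two polylines running in opposite directions produce:

from shapely.geometry import Polygon
from shapely.validation import explain_validity

bowtie = Polygon([(0, 0), (1, 1), (1, 0), (0, 1)])  # exterior ring crosses itself
print(bowtie.is_valid)           # False
print(explain_validity(bowtie))  # e.g. 'Self-intersection[...]'

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])  # well-formed ring
print(square.is_valid)           # True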
Example 5
    def from_file(filename, label, field_id=None, project_straight_line=False):
        section_seq = CrossSectionSequence()

        if filename.endswith('.i3s'):
            with bk.Read(filename) as in_i3s:
                in_i3s.read_header()
                for i, line in enumerate(in_i3s.get_open_polylines()):
                    # Use the `Value` attribute if field_id is not None
                    line_id = i if field_id is None else line.attributes()[0]
                    z_array = np.array([(coord[2], )
                                        for coord in line.polyline().coords],
                                       dtype=float_vars('Z'))
                    line = line.to_2d()
                    section = CrossSection(line_id,
                                           list(line.polyline().coords), label)
                    section.coord.values = z_array
                    section_seq.add_section(section)

        elif filename.endswith('.shp'):
            shp_type = shp.get_shape_type(filename)
            if shp_type in (shapefile.POLYLINEZ, shapefile.POLYLINEM):
                field_id_index = get_field_index(filename, field_id)
                for i, line in enumerate(shp.get_open_polylines(filename)):
                    line_id = i if field_id is None else line.attributes()[field_id_index]
                    z_array = np.array([(coord[2], )
                                        for coord in line.polyline().coords],
                                       dtype=float_vars(['Z']))
                    line = line.to_2d()
                    section = CrossSection(line_id,
                                           list(line.polyline().coords), label)
                    section.coord.values = z_array
                    section_seq.add_section(section)

            elif shp_type in (shapefile.POINT, shapefile.POINTZ):
                field_id_index = get_field_index(filename, field_id)
                field_indexes, field_names = [], []
                if shp_type == shapefile.POINTZ:
                    field_names.append('Z')
                for index, name in shp.get_numeric_attribute_names(filename):
                    if name.startswith('Z'):
                        field_indexes.append(index)
                        field_names.append(name)
                coords, z_layers = [], []
                last_point_id = None
                for i, (point, attributes) in enumerate(
                        shp.get_points(filename,
                                       with_z=shp_type == shapefile.POINTZ)):
                    # FIXME: should raise exception if field_id_index is None!
                    point_id = attributes[field_id_index]
                    if i > 0 and point_id != last_point_id:
                        z_array = np.array(z_layers,
                                           dtype=float_vars(field_names))
                        section = CrossSection(last_point_id, coords, label)
                        section.coord.values = z_array
                        section_seq.add_section(section)
                        coords, z_layers = [], []
                    coords.append(point[:2])
                    if shp_type == shapefile.POINTZ:
                        z_layers.append((point[2], ) +
                                        tuple(attributes[index]
                                              for index in field_indexes))
                    else:
                        z_layers.append(
                            tuple(attributes[index]
                                  for index in field_indexes))
                    last_point_id = point_id
                z_array = np.array(z_layers, dtype=float_vars(field_names))
                section = CrossSection(last_point_id, coords, label)
                section.coord.values = z_array
                section_seq.add_section(section)

            else:
                raise TatooineException(
                    "The type of file %s is not POINT[Z] or POLYLINEZ[M]" %
                    filename)

        else:
            raise NotImplementedError(
                "Only shp and i3s formats are supported for cross-sections")

        if project_straight_line:
            for section in section_seq:
                section.project_straight_line()
        return section_seq
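A hypothetical call of the factory above, assuming it is exposed as a static method of CrossSectionSequence; file name, label and field name are placeholders:

section_seq = CrossSectionSequence.from_file('cross_sections.shp', label='cross-section',
                                             field_id='PROFILE_ID')
for section in section_seq:
    print(section.coord.values.shape)  # structured Z array built above, one record per point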
Example 6
    def export_sections(self, path):
        """
        Export generated profiles in a shp, i3s or georefC file
        /!\ Not relevant if constant_long_disc is False
        TODO: Use class MascaretGeoFile
        """
        values = self.interp_values_from_geom()
        if path.endswith('.georefC'):
            with open(path, 'w') as out_geo:
                for dist in np.unique(self.points['Xl']):
                    pos = self.points['Xl'] == dist
                    points = self.points[pos]

                    # Compute Xt  (FIXME: rather keep from previous calculations...)
                    Xt = np.sqrt(
                        np.power(np.ediff1d(points['X'], to_begin=0.), 2) +
                        np.power(np.ediff1d(points['Y'], to_begin=0.), 2))
                    Xt = Xt.cumsum()
                    points = append_fields(points, 'Xt', Xt, usemask=False)

                    for i, row in enumerate(points):
                        if i == 0:
                            positions_str = ' %f %f %f %f' % (
                                row['X'], row['Y'], points[-1]['X'],
                                points[-1]['Y'])
                            positions_str += ' AXE %f %f' % (
                                row['X'], row['Y']
                            )  # FIXME: not the axis position...
                            out_geo.write(
                                'Profil Bief_0 %s %f%s\n' %
                                ('P' + str(dist), dist, positions_str))

                        layers_str = ' ' + ' '.join([
                            COURLIS_FLOAT_FMT % x for x in values[:, pos][:, i]
                        ])

                        out_geo.write(
                            '%f%s B %f %f\n' %
                            (row['Xt'], layers_str, row['X'], row['Y']))
            return

        lines = []
        for dist in np.unique(self.points['Xl']):
            pos = self.points['Xl'] == dist
            line = geometry.Polyline([
                (x, y, z)
                # Z values of the first layer, restricted to the points of this profile
                for (x, y), z in zip(self.points[pos][['X', 'Y']], values[0, pos])
            ])
            line.add_attribute(dist)
            lines.append(line)

        if path.endswith('.i3s'):
            with bk.Write(path) as out_i3s:
                out_i3s.write_header()
                out_i3s.write_lines(lines, [l.attributes()[0] for l in lines])

        elif path.endswith('.shp'):
            shp.write_shp_lines(path, shapefile.POLYLINEZ, lines, 'Z')

        else:
            raise NotImplementedError(
                "Only the shp (POLYLINEZ), i3s and georefC formats are supported for "
                "the generated cross-sections file")
Example 7
def slf_flux2d(args):
    if len(args.scalars) > 2:
        logger.critical('Only two scalars can be integrated!')
        sys.exit(2)

    # Read set of lines from input file
    polylines = []
    if args.in_sections.endswith('.i2s'):
        with BlueKenue.Read(args.in_sections) as f:
            f.read_header()
            for polyline in f.get_open_polylines():
                polylines.append(polyline)
    elif args.in_sections.endswith('.shp'):
        try:
            for polyline in Shapefile.get_open_polylines(args.in_sections):
                polylines.append(polyline)
        except ShapefileException as e:
            logger.critical(e)
            sys.exit(3)
    else:
        logger.critical('File "%s" is not an i2s or shp file.' %
                        args.in_sections)
        sys.exit(2)

    if not polylines:
        logger.critical('The file does not contain any open polyline.')
        sys.exit(1)
    logger.debug('The file contains {} open polyline{}.'.format(
        len(polylines), 's' if len(polylines) > 1 else ''))

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Determine flux computations properties
        var_IDs = args.vectors + args.scalars
        variables_missing = [
            var_ID for var_ID in var_IDs if var_ID not in resin.header.var_IDs
        ]
        if variables_missing:
            if len(variables_missing) > 1:
                logger.critical(
                    'Variables {} are not present in the Serafin file'.format(
                        variables_missing))
            else:
                logger.critical(
                    'Variable {} is not present in the Serafin file'.format(
                        variables_missing[0]))
            logger.critical(
                'Check also `--lang` argument for variable detection.')
            sys.exit(1)
        if var_IDs not in PossibleFluxComputation.common_fluxes():
            logger.warning(
                'This flux computation is not a common one. Check what you are doing (or the language).'
            )

        flux_type = PossibleFluxComputation.get_flux_type(var_IDs)

        section_names = ['Section %i' % (i + 1) for i in range(len(polylines))]
        calculator = FluxCalculator(flux_type, var_IDs, resin, section_names,
                                    polylines, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_intersections()
        result = []
        for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
            i_result = [str(time)]
            values = []

            for var_ID in calculator.var_IDs:
                values.append(resin.read_var_in_frame(time_index, var_ID))

            for j in range(len(polylines)):
                intersections = calculator.intersections[j]
                flux = calculator.flux_in_frame(intersections, values)
                i_result.append(settings.FMT_FLOAT.format(flux))

            result.append(i_result)

        # Write CSV
        mode = 'w' if args.force else 'x'
        with open(args.out_csv, mode) as out_csv:
            calculator.write_csv(result, out_csv, args.sep)
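slf_flux2d() is written as a CLI entry point taking an argparse namespace; a minimal sketch of the namespace it reads (attribute names come from the function body above, all values are placeholders):

import argparse

args = argparse.Namespace(
    in_slf='results.slf', lang='en', in_sections='sections.i2s',
    vectors=['U', 'V'], scalars=[], ech=1,
    out_csv='flux.csv', force=True, sep=';',
)
# slf_flux2d(args)  # would run the flux computation on the placeholder files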
Example 8
def slf_volume(args):
    # Read set of lines from input file
    polygons = []
    if args.in_polygons.endswith('.i2s'):
        with BlueKenue.Read(args.in_polygons) as f:
            f.read_header()
            for poly in f.get_polygons():
                polygons.append(poly)
    elif args.in_polygons.endswith('.shp'):
        try:
            for polygon in Shapefile.get_polygons(args.in_polygons):
                polygons.append(polygon)
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)
    else:
        logger.error('File "%s" is not an i2s or shp file.' % args.in_polygons)
        sys.exit(2)

    if not polygons:
        logger.error('The file does not contain any polygon.')
        sys.exit(1)
    logger.debug('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))

    names = ['Polygon %d' % (i + 1) for i in range(len(polygons))]

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.error('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Check variables consistency
        if args.upper_var not in resin.header.var_IDs:
            logger.error('Upper variable "%s" is not in Serafin file' % args.upper_var)
            sys.exit(1)
        upper_var = args.upper_var
        lower_var = args.lower_var
        if args.lower_var is not None:
            if args.lower_var == 'init':
                lower_var = VolumeCalculator.INIT_VALUE
            else:
                if lower_var not in resin.header.var_IDs:
                    logger.error('Lower variable "%s" is not in Serafin file' % lower_var)
                    sys.exit(1)

        if args.detailed:
            volume_type = VolumeCalculator.POSITIVE
        else:
            volume_type = VolumeCalculator.NET
        calculator = VolumeCalculator(volume_type, upper_var, lower_var, resin, names, polygons, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_weights(tqdm)

        result = []
        for time_index in tqdm(calculator.time_indices, unit='frame'):
            i_result = [str(resin.time[time_index])]
            values = calculator.read_values_in_frame(time_index)

            for j in range(len(calculator.polygons)):
                weight = calculator.weights[j]
                volume = calculator.volume_in_frame_in_polygon(weight, values, calculator.polygons[j])
                if calculator.volume_type == VolumeCalculator.POSITIVE:
                    for v in volume:
                        i_result.append(settings.FMT_FLOAT.format(v))
                else:
                    i_result.append(settings.FMT_FLOAT.format(volume))
            result.append(i_result)

        # Write CSV
        mode = 'w' if args.force else 'x'
        with open(args.out_csv, mode) as out_csv:
            calculator.write_csv(result, out_csv, args.sep)
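The same pattern applies to slf_volume(); attribute names are read from the function body above, values are placeholders:

import argparse

args = argparse.Namespace(
    in_slf='results.slf', lang='en', in_polygons='zones.shp',
    upper_var='S', lower_var='init', detailed=False, ech=1,
    out_csv='volumes.csv', force=True, sep=';',
)
# slf_volume(args)  # would compute volumes inside each polygon over time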