Example #1
def mesh_and_interpolate(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    logger.info("~> Reading input files")
    axe = get_hydraulic_axis(args.infile_axis)
    section_seq = CrossSectionSequence.from_file(
        args.infile_cross_sections,
        "Cross-section",
        field_id=args.attr_cross_sections,
        project_straight_line=args.project_straight_line)

    # if args.infile_epis is not None and args.dist_corr_epi is not None:
    #     epis = CrossSectionSequence.from_file(args.infile_epis, "Groynes", field_id=args.attr_epis,
    #                                           project_straight_line=args.project_straight_line)
    # else:
    #     epis = None

    section_seq.compute_dist_proj_axe(axe, args.dist_max)
    section_seq.check_intersections()
    section_seq.sort_by_dist()
    # section_seq.export_sections_shp('profiles_projected.shp')  # DEBUG

    if args.infile_constraint_lines is None:
        constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
            section_seq, args.interp_constraint_lines)
    else:
        constraint_lines = ConstraintLine.get_lines_from_file(
            args.infile_constraint_lines, args.interp_constraint_lines)
        if args.nb_pts_lat is not None and len(constraint_lines) != 2:
            raise TatooineException(
                "Argument `--nb_pts_lat` is only compatible with 2 constraint lines!"
            )
        if args.interp_values.startswith('BI') and len(constraint_lines) != 2:
            raise TatooineException(
                "A 2D interpolation is only compatible with 2 constraint lines!"
            )
        section_seq.find_and_add_limits(constraint_lines, args.dist_max)

    mesh_constr = MeshConstructor(section_seq=section_seq,
                                  lat_step=args.lat_step,
                                  nb_pts_lat=args.nb_pts_lat,
                                  interp_values=args.interp_values)
    mesh_constr.build_interp(constraint_lines, args.long_step,
                             args.constant_long_disc)
    # mesh_constr.export_segments('check_segments.shp')  # DEBUG

    # if epis is not None:
    #     mesh_constr.corr_bathy_on_epis(epis, args.dist_corr_epi)

    if args.outfile_nodes is not None:
        mesh_constr.export_points(args.outfile_nodes)

    if args.outfile_mesh is not None:
        mesh_constr.build_mesh()
        mesh_constr.export_mesh(args.outfile_mesh, lang=args.lang)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
Example #2
    def summary(self):
        try:
            nnode = len(self.triangle['vertices'])
            nelem = len(self.triangle['triangles'])
        except KeyError:
            raise TatooineException("The generation of the mesh failed!")
        return "Mesh with {} nodes and {} elements".format(nnode, nelem)
Example #3
    def project_straight_line(self):
        """
        @brief: Planar projection on the straight line joining the first and last points of the cross-section
        """
        if self.limits:
            raise TatooineException(
                "Limits have to be set after calling to project_straight_line!"
            )

        # Build straight line
        first_point = (self.coord.array['X'][0], self.coord.array['Y'][0])
        last_point = (self.coord.array['X'][-1], self.coord.array['Y'][-1])
        line = LineString((first_point, last_point))

        # Update X, Y, Xt columns in self.coord.array
        for row in self.coord.array:
            point = Point((row['X'], row['Y']))
            dist = line.project(point)
            point_project = line.interpolate(dist)
            row['X'] = point_project.x
            row['Y'] = point_project.y
        self.coord.compute_Xt()

        # Update geom
        self.geom = self.coord.convert_as_linestring()
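
# Standalone illustration of the projection above, assuming only shapely and plain
# coordinate tuples instead of the structured array: every point of a cross-section
# is snapped onto the chord joining its first and last points.
from shapely.geometry import LineString, Point

coords = [(0.0, 0.0), (4.8, 1.3), (10.2, -0.9), (15.0, 0.0)]
chord = LineString([coords[0], coords[-1]])
projected = [chord.interpolate(chord.project(Point(xy))) for xy in coords]
print(projected)  # every projected point lies on the chord (here y == 0)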
    def get_lines_from_file(filename, interp_coord='LINEAR'):
        """
        Returns a list of ConstraintLine from an input file
        TODO 1: Value is ignored in i2s file format
        """
        lines = []
        if filename is not None:
            if filename.endswith('.i2s'):
                with bk.Read(filename) as in_i2s:
                    in_i2s.read_header()
                    for i, line in enumerate(in_i2s.get_open_polylines()):
                        lines.append(
                            ConstraintLine(i, list(line.polyline().coords),
                                           interp_coord))

            elif filename.endswith('.shp'):
                if shp.get_shape_type(filename) not in (shapefile.POLYLINE,
                                                        shapefile.POLYLINEZ,
                                                        shapefile.POLYLINEM):
                    raise TatooineException(
                        "The type of file %s is not POLYLINEZ[M]" % filename)
                for i, line in enumerate(shp.get_open_polylines(filename)):
                    lines.append(
                        ConstraintLine(i, list(line.polyline().coords),
                                       interp_coord))

            else:
                raise NotImplementedError(
                    "Only shp and i2s formats are supported for constraint lines"
                )

        return lines
def densify_cross_sections(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    logger.info("~> Reading input files")
    axe = get_hydraulic_axis(args.infile_axis)
    section_seq = CrossSectionSequence.from_file(
        args.infile_cross_sections,
        "Cross-section",
        field_id=args.attr_cross_sections,
        project_straight_line=args.project_straight_line)

    section_seq.compute_dist_proj_axe(axe, args.dist_max)
    section_seq.check_intersections()
    section_seq.sort_by_dist()

    if args.infile_constraint_lines is None:
        constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
            section_seq, args.interp_constraint_lines)
    else:
        constraint_lines = ConstraintLine.get_lines_from_file(
            args.infile_constraint_lines, args.interp_constraint_lines)
        if args.nb_pts_lat is not None and len(constraint_lines) != 2:
            raise TatooineException(
                "Argument `--nb_pts_lat` is only compatible with 2 constraint lines!"
            )
        if args.interp_values.startswith('BI') and len(constraint_lines) != 2:
            raise TatooineException(
                "A 2D interpolation is only compatible with 2 constraint lines!"
            )
        section_seq.find_and_add_limits(constraint_lines, args.dist_max)

    #section_seq.export_sections_shp('export_cross-sections.shp')

    mesh_constr = MeshConstructor(section_seq=section_seq,
                                  lat_step=args.lat_step,
                                  nb_pts_lat=args.nb_pts_lat,
                                  interp_values=args.interp_values)
    mesh_constr.build_interp(constraint_lines, args.long_step, True)
    mesh_constr.export_sections(args.outfile_sections)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
Example #6
    def build_mesh(self, in_floworiented_crs=False, opts='p'):
        """
        @brief: Build mesh under constraints
        @param in_floworiented_crs <bool>: if True, the triangulation is computed in the flow-oriented coordinate system (vertices are converted back to Cartesian coordinates afterwards)
        @param opts <str>: options for the triangulation.
            `p` - Triangulates a Planar Straight Line Graph.
            `q` - Quality mesh generation with no angles smaller than 20 degrees.
                  An alternate minimum angle may be specified after the `q`.
            `a` - Imposes a maximum triangle area constraint. A fixed area constraint (that applies to every triangle)
                  may be specified after the `a`, or varying areas may be read from the input dictionary.
        """
        logger.info("~> Building mesh")
        if in_floworiented_crs:
            tri = self.export_floworiented_triangulation_dict()
            self.triangle = triangle.triangulate(tri, opts=opts)
            self.triangle['vertices'] = self.export_triangulation_dict()[
                'vertices']  # overwrite by cartesian coordinates
        else:
            tri = self.export_triangulation_dict()
            self.triangle = triangle.triangulate(tri, opts=opts)

        if opts == 'p':  # Check that vertices correspond to points
            if len(self.points) != len(self.triangle['vertices']):
                if len(self.points) < len(self.triangle['vertices']):
                    logger.error("New nodes are:")
                    ori_points = np.column_stack(
                        (self.points['X'], self.points['Y']))
                    ori_combined = ori_points[:, 0] * ori_points[:, 1] / (
                        ori_points[:, 0] + ori_points[:, 1])
                    new_points = self.triangle['vertices']
                    new_combined = new_points[:, 0] * new_points[:, 1] / (
                        new_points[:, 0] + new_points[:, 1])
                    diff = np.setxor1d(ori_combined, new_combined)
                    logger.error(new_points[np.isin(new_combined, diff)])
                raise TatooineException(
                    "Mesh is corrupted... %i vs %i nodes." %
                    (len(self.points), len(self.triangle['vertices'])))

        if 'triangles' not in self.triangle:
            raise TatooineException(
                "Mesh was not generated, no triangle found!")
        logger.info(self.summary())
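
# Illustrative sketch of the `opts` flags documented above, on a toy input unrelated
# to the mesher: a unit square described as a Planar Straight Line Graph ('p'),
# triangulated with a quality constraint ('q') and a maximum triangle area ('a0.05').
import numpy as np
import triangle

tri = {
    'vertices': np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]),
    'segments': np.array([[0, 1], [1, 2], [2, 3], [3, 0]]),
}
result = triangle.triangulate(tri, opts='pqa0.05')
print("%i nodes, %i triangles" % (len(result['vertices']), len(result['triangles'])))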
Example #7
    def extract_bed(self, bed1_id, bed2_id):
        """
        @brief: Extract coordinates of a portion of a cross-section between 2 limits
            /!\\ bed1_id and bed2_id should be "ordered" correctly, otherwise an exception is raised
        @return <Bed>: structured array with columns ('X', 'Y', 'Xt', 'xt')
        """
        limit1 = self.get_limit_by_id(bed1_id)
        limit2 = self.get_limit_by_id(bed2_id)

        Xt1 = limit1['Xt_section']
        Xt2 = limit2['Xt_section']

        # Check that Xt are increasing from bed1_id to bed2_id
        if Xt1 > Xt2:
            raise TatooineException(
                "Order of beds {} and {} leads to decreasing Xt values for {}".
                format(bed1_id, bed2_id, self))

        Xt_section = self.coord.array['Xt']
        sub_coord = self.coord.array[np.logical_and(Xt_section >= Xt1,
                                                    Xt_section <= Xt2)]

        # Add starting point if necessary
        if Xt1 not in Xt_section:
            row = np.array([
                tuple(limit1[var] if var not in ('Xt', 'xt') else Xt1
                      for var in sub_coord.dtype.names)
            ],
                           dtype=sub_coord.dtype)
            row['xt'] = 0.0
            sub_coord = np.insert(sub_coord, 0, row)

        # Add last point if necessary
        if Xt2 not in Xt_section:
            row = np.array([
                tuple(limit2[var] if var not in ('Xt', 'xt') else Xt2
                      for var in sub_coord.dtype.names)
            ],
                           dtype=sub_coord.dtype)
            row['xt'] = 1.0
            sub_coord = np.append(sub_coord, row)

        # Check order of points
        if not strictly_increasing(sub_coord['Xt']):
            logger.debug("/!\ Xt values are not strictly increasing"
                         )  # FIXME: It should not happen!
            logger.debug(sub_coord['Xt'])
            logger.debug("Please check the following limits below:")
            logger.debug(limit1)
            logger.debug(limit2)
            points_to_keep = np.ediff1d(sub_coord['Xt'], to_begin=1.) != 0.
            sub_coord = sub_coord[points_to_keep]

        return Bed(sub_coord, ['xt'])
Example #8
    def export_segments(self, path):
        if path.endswith('.shp'):
            logger.info("~> Export segments in a shp file")
            with shapefile.Writer(path, shapeType=shapefile.POLYLINE) as w:
                w.field('id_seg', 'N', decimal=6)
                for i, (node1, node2) in enumerate(self.segments):
                    point1 = self.points[node1]
                    point2 = self.points[node2]
                    w.line([[[point1['X'], point1['Y']],
                             [point2['X'], point2['Y']]]])
                    w.record(id_seg=i)
        else:
            raise TatooineException(
                "Only the shp format is supported for segments")
Example #9
    def corr_bathy_on_epis(self, epis, dist_corr_epi):
        raise NotImplementedError  # TODO
        logger.info("~> Correcting bathymetry around groynes ('épis')")
        if self.var_names() != ['Z']:
            raise TatooineException(
                "Elevation correction around groynes is not possible with sediment layers"
            )
        for epi in epis:
            epi_geom = epi.coord.convert_as_linestring()
            for i, coord in enumerate(self.points):
                pt_node = Point((coord['X'], coord['Y']))
                if epi_geom.distance(pt_node) < dist_corr_epi:
                    Xt_proj = epi_geom.project(pt_node)
                    pt_proj = epi_geom.interpolate(Xt_proj)
                    epi.coord.values['Z'][i] = pt_proj.z
Example #10
    def compute_xp(self):
        """
        Compute dimensionless from starting to ending point distance projetée adimensionnée sur droite début->fin
        """
        trace = LineString([self.array[['X', 'Y']][0], self.array[['X', 'Y']][-1]])
        Xp = np.empty(len(self.array), dtype=np.float)

        for i, row in enumerate(self.array):
            point = Point(list(row)[:2])
            Xp[i] = trace.project(point)

        if not strictly_increasing(Xp):
            raise TatooineException("L'abscisse projetée n'est pas strictement croissante le long du profil. "
                                    "Le profil n'est pas suffisamment droit...")

        xp = Xp/Xp[-1]
        self.array = append_fields(self.array, 'xp', xp, usemask=False)
Example #11
    def find_and_add_limit(self, constraint_line, dist_max=None):
        """
        @param constraint_line <shapely.geometry.LineString>: 2D constraint line
        @param dist_max <float>: maximum search distance to rescue intersections for limits
        """
        if self.geom.intersects(constraint_line.geom):
            intersection = self.geom.intersection(constraint_line.geom)

            if isinstance(intersection, MultiPoint):
                logger.warning(
                    "Intersection between '{}' and '{}' contains multiple points, "
                    "only the first is kept.".format(self, constraint_line))
                intersection = intersection[0]

            if isinstance(intersection, Point):
                # Compute projections
                Xt_section = self.geom.project(intersection)
                Xt_line = constraint_line.geom.project(intersection)
                self.add_limit(constraint_line.id, Xt_section, Xt_line,
                               intersection)

            else:
                raise TatooineException(
                    "Intersection between '{}' and '{}' is empty or not supported: {}"
                    .format(self, constraint_line, type(intersection)))
        else:
            if dist_max is not None:
                distance = self.geom.distance(constraint_line.geom)
                if distance < dist_max:
                    for i, coord in enumerate(constraint_line.coord):
                        # Try to find a point of the constraint line which is in the vicinity of the current section
                        point = Point(coord)
                        dist = self.geom.distance(point)
                        if dist < dist_max:
                            # A point is found and is considered
                            Xt_line = constraint_line.geom.project(point)
                            Xt_section = self.geom.project(point)
                            intersection = self.geom.interpolate(Xt_section)
                            self.add_limit(constraint_line.id, Xt_section,
                                           Xt_line, intersection)
                            logger.debug(
                                "Add a limit with the line {} after {} iterations (distance = {})"
                                .format(constraint_line.id, i, dist))
                            break
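
# Toy illustration of the nominal intersection case above, assuming only shapely:
# the limit is located by projecting the intersection point back onto both geometries
# to obtain the two curvilinear abscissae passed to add_limit.
from shapely.geometry import LineString

section_geom = LineString([(0.0, 0.0), (10.0, 0.0)])
line_geom = LineString([(4.0, -5.0), (4.0, 5.0)])
intersection = section_geom.intersection(line_geom)  # Point(4, 0)
Xt_section = section_geom.project(intersection)      # 4.0 along the cross-section
Xt_line = line_geom.project(intersection)            # 5.0 along the constraint line
print(Xt_section, Xt_line)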
Example #12
    def add_points(self, coord, zone_index, xl, bed_index):
        """!
        @brief: Add vertices/nodes
        @param coord <2D-array float>: table with columns 'X', 'Y', 'xt', 'Xt_upstream' and 'Xt_downstream'
        @param zone_index <int>: index of the zone (between two consecutive cross-sections)
        @param xl <float>: relative longitudinal position within the zone (in [0, 1])
        @param bed_index <int>: index of the bed (between two consecutive constraint lines)
        """
        if self.casiers_nodes_idx:
            raise TatooineException(
                "Impossible to add points in river bed after having considered the floodplain"
            )

        new_coord = np.empty(len(coord), dtype=self.points.dtype)
        # FIXME: avoid copying in using np.lib.recfunctions.append_fields?
        for var in ['X', 'Y', 'xt', 'Xt_upstream',
                    'Xt_downstream']:  # copy existing columns
            new_coord[var] = coord[var]
        new_coord['zone'] = zone_index
        new_coord['bed'] = bed_index
        new_coord['Xl'] = self.section_seq[zone_index].dist_proj_axe * (1 - xl) + \
                          self.section_seq[zone_index + 1].dist_proj_axe * xl
        new_coord['xl'] = xl
        self.i_pt += len(new_coord)
        self.points = np.hstack((self.points, new_coord))
Example #13
    def __init__(self, id, coord, label='Cross-section'):
        """
        Create a cross-section from coordinates X and Y
        Limits are not set at the object instantiation

        @param id <integer|str>: unique identifier
        @param label <str>: type of section (`Cross-section` or `Transverse constraint line`)
        @param coord <[tuple]>: sequence of (X, Y) coordinates
        """
        self.id = id
        self.label = label
        self.coord = Coord(np.array(coord, dtype=float_vars(['X', 'Y'])),
                           ['Xt', 'xt'])
        self.nb_points = len(self.coord.array)
        if len(coord) < 2:
            raise TatooineException("%s %s does not have at least 2 points!" %
                                    (label, id))
        self.geom = LineString(
            coord)  # FIXME: might contain duplicated points

        self.limits = OrderedDict()
        self.dist_proj_axe = -1
Example #14
    def export_points(self, path):
        if path.endswith('.xyz'):
            logger.info("~> Exports points in xyz")
            with open(path, 'wb') as fileout:
                z_array = self.interp_values_from_geom()[0, :]
                np.savetxt(fileout,
                           np.vstack((self.points['X'], self.points['Y'],
                                      z_array)).T,
                           delimiter=' ',
                           fmt='%.{}f'.format(DIGITS))

        elif path.endswith('.shp'):
            logger.info("~> Exports points in shp")
            z_array = self.interp_values_from_geom()[0, :]
            with shapefile.Writer(path, shapeType=shapefile.POINT) as w:
                w.field('zone', 'N', decimal=6)
                w.field('bed', 'N', decimal=6)
                w.field('Xt_upstream', 'N', decimal=6)
                w.field('Xt_downstream', 'N', decimal=6)
                w.field('xt', 'N', decimal=6)
                w.field('xl', 'N', decimal=6)
                w.field('Z', 'N', decimal=6)
                for row, z in zip(self.points, z_array):
                    w.point(row['X'], row['Y'])
                    w.record(
                        **{
                            'zone': float(row['zone']),
                            'bed': float(row['bed']),
                            'Xt_upstream': row['Xt_upstream'],
                            'Xt_downstream': row['Xt_downstream'],
                            'xt': row['xt'],
                            'xl': row['xl'],
                            'Z': z
                        })

        else:
            raise TatooineException(
                "Only shp and xyz are supported for points set")
Example #15
    def compute_dist_proj_axe(self, axe_geom, dist_max):
        """
        @brief: Compute distance along hydraulic axis
        @param axe_geom <shapely.geometry.LineString>: hydraulic axis (/!\\ Beware of its orientation)
        @param dist_max <float>: maximum search distance to rescue intersections for limits
        """
        logger.info(
            "~> Compute distances along hydraulic axis to order cross-sections"
        )
        to_keep_list = []
        for section in self:
            section_geom = section.geom
            if section_geom.intersects(axe_geom):
                intersection = section_geom.intersection(axe_geom)
                if isinstance(intersection, Point):
                    section.dist_proj_axe = axe_geom.project(intersection)
                else:
                    raise TatooineException(
                        "Intersection between '{}' and the hydraulic axis "
                        "is not a unique point".format(section))
            else:
                if dist_max is not None:
                    for pos in (0, -1):
                        dist = section_geom.distance(
                            Point(axe_geom.coords[pos]))
                        if dist < dist_max:
                            section.dist_proj_axe = 0.0 if pos == 0 else axe_geom.length
            if section.dist_proj_axe == -1:
                logger.warning(
                    "{} does not intersect the hydraulic axis (distance = {}m) and is ignored"
                    .format(section, section.geom.distance(axe_geom)))
                to_keep_list.append(False)
            else:
                to_keep_list.append(True)

        self.section_list = [
            p for p, to_keep in zip(self.section_list, to_keep_list) if to_keep
        ]
Example #16
    def __init__(self, array, vars2add, remove_duplicates=False):
        """
        @param array: structured array. `X` and `Y` are compulsory but other variables (such as `Z`) and distances are optional
        @param vars2add <[str]>: list possibly including `Xt` and/or `xt` to compute them if they are not already present
        @param remove_duplicates <bool>: remove consecutive duplicated points
        """
        self.array = array
        self.coord_labels = list(self.array.dtype.fields.keys())
        self.values = None

        if 'X' not in self.coord_labels or 'Y' not in self.coord_labels:
            raise TatooineException("Columns X and Y are compulsory.")

        if 'Xt' in vars2add:
            self.compute_Xt()
        if 'xt' in vars2add:
            self.compute_xt()

        if remove_duplicates:
            if not strictly_increasing(self.array['Xt']):
                logger.warn("Duplicated points are removed")
                # Suppression des doublons (points superposés dans la polyligne)
                points2keep = np.ediff1d(self.array['Xt'], to_begin=1.) != 0.
                self.array = self.array[points2keep]
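
# Sketch of the structured array this constructor expects; the dtype is spelled out
# explicitly here, assuming `float_vars(['X', 'Y'])` (used elsewhere in the project)
# builds an equivalent dtype. 'Xt' and 'xt' are then derived from the coordinates.
import numpy as np

array = np.array([(0.0, 0.0), (3.0, 4.0), (6.0, 8.0)],
                 dtype=[('X', float), ('Y', float)])
coord = Coord(array, ['Xt', 'xt'])  # curvilinear abscissa and its normalized counterpart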
def mesh_crue10_run(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    # Read the model and its submodels from xml/shp files
    etude = Etude(args.infile_etu)
    modele = etude.get_modele(args.model_name)
    modele.read_all()
    logger.info(modele)
    for sous_modele in modele.liste_sous_modeles:
        sous_modele.remove_sectioninterpolee()
        sous_modele.normalize_geometry()
        logger.info(sous_modele.summary())
        # sous_modele.write_shp_limites_lits_numerotes('limites_lits.shp')  # DEBUG
    logger.info(modele)

    global_mesh_constr = MeshConstructor()

    # Handle branches in minor bed
    for i, branche in enumerate(modele.get_liste_branches()):
        # Ignore branch if branch_patterns is set and do not match with current branch name
        if args.branch_patterns is not None:
            ignore = True
            for pattern in args.branch_patterns:
                if pattern in branche.id:
                    ignore = False
                    break
        else:
            ignore = False

        if branche.type not in args.branch_types_filter or not branche.is_active:
            ignore = True

        if not ignore:
            logger.info("===== TRAITEMENT DE LA BRANCHE %s =====" % branche.id)
            axe = branche.geom
            try:
                section_seq = CrossSectionSequence()
                for crue_section in branche.liste_sections_dans_branche:
                    if isinstance(crue_section, SectionProfil):
                        coords = list(crue_section.get_coord(add_z=True))
                        section = CrossSection(crue_section.id,
                                               [(coord[0], coord[1])
                                                for coord in coords],
                                               'Section')

                        # Determine some variables (constant over the simulation) from the geometry
                        z = np.array([coord[2] for coord in coords])
                        is_bed_active = crue_section.get_is_bed_active_array()
                        mean_strickler = crue_section.get_friction_coeff_array(
                        )
                        section.coord.values = np.core.records.fromarrays(
                            np.column_stack(
                                (z, is_bed_active, mean_strickler)).T,
                            names=VARIABLES_FROM_GEOMETRY)

                        section_seq.add_section(section)

                section_seq.compute_dist_proj_axe(axe, args.dist_max)
                if len(section_seq) >= 2:
                    section_seq.check_intersections()
                    # section_seq.sort_by_dist() is useless because profiles are already sorted
                    constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
                        section_seq, args.interp_constraint_lines)

                    mesh_constr = MeshConstructor(
                        section_seq=section_seq,
                        lat_step=args.lat_step,
                        nb_pts_lat=args.nb_pts_lat,
                        interp_values=args.interp_values)
                    mesh_constr.build_interp(constraint_lines, args.long_step,
                                             args.constant_long_disc)
                    mesh_constr.build_mesh(in_floworiented_crs=True)

                    global_mesh_constr.append_mesh_constr(mesh_constr)
                else:
                    logger.warning("Branche ignorée par manque de sections")
            except TatooineException as e:
                logger.error(
                    "/!\\ Branch ignored because of a blocking error:")
                logger.error(e.message)
            logger.info("\n")

    # Handle casiers in floodplain
    nb_casiers = len(modele.get_liste_casiers())
    if args.infile_dem and nb_casiers > 0:
        logger.info("===== TRAITEMENT DES CASIERS =====")

        if not os.path.exists(args.infile_dem):
            raise TatooineException("File not found: %s" % args.infile_dem)
        from gdal import Open
        raster = Open(args.infile_dem)
        dem_interp = interp_raster(raster)

        floodplain_step = args.floodplain_step if args.floodplain_step is not None else args.long_step
        max_elem_area = floodplain_step * floodplain_step / 2.0
        simplify_dist = floodplain_step / 2.0

        for i, casier in enumerate(modele.get_liste_casiers()):
            if casier.is_active:
                if casier.geom is None:
                    raise TatooineException(
                        "Geometry of %s could not be found" % casier)
                line = casier.geom.simplify(simplify_dist)
                if not line.is_closed:
                    raise RuntimeError("The simplified casier contour is not a closed ring")
                coords = resample_2d_line(
                    line.coords,
                    floodplain_step)[1:]  # Ignore last duplicated node

                hard_nodes_xy = np.array(coords, dtype=float)
                hard_nodes_idx = np.arange(0, len(hard_nodes_xy), dtype=int)
                hard_segments = np.column_stack(
                    (hard_nodes_idx, np.roll(hard_nodes_idx, 1)))

                tri = {
                    'vertices':
                    np.array(
                        np.column_stack(
                            (hard_nodes_xy[:, 0], hard_nodes_xy[:, 1]))),
                    'segments':
                    hard_segments,
                }
                triangulation = triangle.triangulate(tri,
                                                     opts='qpa%f' %
                                                     max_elem_area)

                nodes_xy = np.array(triangulation['vertices'], dtype=float)
                bottom = dem_interp(nodes_xy)
                points = unstructured_to_structured(np.column_stack(
                    (nodes_xy, bottom)),
                                                    names=['X', 'Y', 'Z'])

                global_mesh_constr.add_floodplain_mesh(triangulation, points)

    if len(global_mesh_constr.points) == 0:
        raise ExceptionCrue10(
            "No point to process, adjust the `--branch_patterns` and/or `--branch_types_filter` options"
        )

    logger.info(global_mesh_constr.summary())  # General information about the merged mesh

    if args.infile_rcal:
        # Read rcal result file
        results = RunResults(args.infile_rcal)
        logger.info(results.summary())

        # Check result consistency
        missing_sections = modele.get_missing_active_sections(
            results.emh['Section'])
        if missing_sections:
            raise ExceptionCrue10("Sections manquantes :\n%s" %
                                  missing_sections)

        # Subset results to get requested variables at active sections
        varnames_1d = results.variables['Section']
        logger.info("Variables 1D disponibles aux sections: %s" % varnames_1d)
        try:
            pos_z = varnames_1d.index('Z')
        except ValueError:
            raise TatooineException(
                "The variable Z must be present in the section results"
            )
        if global_mesh_constr.has_floodplain:
            try:
                pos_z_fp = results.variables['Casier'].index('Z')
            except ValueError:
                raise TatooineException(
                    "The variable Z must be present in the casier results"
                )
        else:
            pos_z_fp = None

        pos_variables = [
            results.variables['Section'].index(var) for var in varnames_1d
        ]
        pos_sections_list = [
            results.emh['Section'].index(profil.id)
            for profil in global_mesh_constr.section_seq
        ]
        if global_mesh_constr.has_floodplain:
            pos_casiers_list = [
                results.emh['Casier'].index(casier.id)
                for casier in modele.get_liste_casiers() if casier.is_active
            ]
        else:
            pos_casiers_list = []

        additional_variables_id = ['H']
        if 'Vact' in varnames_1d:
            additional_variables_id.append('M')

        values_geom = global_mesh_constr.interp_values_from_geom()
        z_bottom = values_geom[0, :]
        with Serafin.Write(args.outfile_mesh, args.lang,
                           overwrite=True) as resout:
            title = '%s (written by TatooineMesher)' % os.path.basename(
                args.outfile_mesh)
            output_header = Serafin.SerafinHeader(title=title, lang=args.lang)
            output_header.from_triangulation(
                global_mesh_constr.triangle['vertices'],
                global_mesh_constr.triangle['triangles'] + 1)
            for var_name in VARIABLES_FROM_GEOMETRY:
                if var_name in ['B', 'W']:
                    output_header.add_variable_from_ID(var_name)
                else:
                    output_header.add_variable_str(var_name, var_name, '')
            for var_id in additional_variables_id:
                output_header.add_variable_from_ID(var_id)
            for var_name in varnames_1d:
                output_header.add_variable_str(var_name, var_name, '')
            resout.write_header(output_header)

            if args.calc_unsteady is None:
                for i, calc_name in enumerate(results.calc_steady_dict.keys()):
                    logger.info("~> Calcul permanent %s" % calc_name)
                    # Read a single *steady* calculation
                    res_steady = results.get_res_steady(calc_name)
                    variables_at_profiles = res_steady['Section'][
                        pos_sections_list, :][:, pos_variables]
                    if global_mesh_constr.has_floodplain:
                        z_at_casiers = res_steady['Casier'][pos_casiers_list,
                                                            pos_z_fp]
                    else:
                        z_at_casiers = None

                    # Interpolate between sections and set in casiers
                    values_res = global_mesh_constr.interp_values_from_res(
                        variables_at_profiles, z_at_casiers, pos_z)

                    # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                    depth = np.clip(values_res[pos_z, :] - z_bottom,
                                    a_min=0.0,
                                    a_max=None)

                    # Merge and write values
                    if 'Vact' in varnames_1d:
                        # Compute velocity magnitude from Vact and apply mask "is active bed"
                        velocity = values_res[
                            varnames_1d.index('Vact'), :] * values_geom[1, :]
                        values = np.vstack(
                            (values_geom, depth, velocity, values_res))
                    else:
                        values = np.vstack((values_geom, depth, values_res))

                    resout.write_entire_frame(output_header, 3600.0 * i,
                                              values)

            else:
                calc_unsteady = results.get_calc_unsteady(args.calc_unsteady)
                logger.info("Calcul transitoire %s" % args.calc_unsteady)
                res_unsteady = results.get_res_unsteady(args.calc_unsteady)

                for i, (time, _) in enumerate(calc_unsteady.frame_list):
                    logger.info("~> %fs" % time)
                    res_at_sections = res_unsteady['Section'][i, :, :]
                    variables_at_profiles = res_at_sections[
                        pos_sections_list, :][:, pos_variables]
                    if global_mesh_constr.has_floodplain:
                        z_at_casiers = res_unsteady['Casier'][i,
                                                              pos_casiers_list,
                                                              pos_z_fp]
                    else:
                        z_at_casiers = None

                    # Interpolate between sections
                    values_res = global_mesh_constr.interp_values_from_res(
                        variables_at_profiles, z_at_casiers, pos_z)

                    # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                    depth = np.clip(values_res[pos_z, :] - z_bottom,
                                    a_min=0.0,
                                    a_max=None)

                    # Merge and write values
                    if 'Vact' in varnames_1d:
                        # Compute velocity magnitude from Vact and apply mask "is active bed"
                        velocity = values_res[
                            varnames_1d.index('Vact'), :] * values_geom[1, :]
                        values = np.vstack(
                            (values_geom, depth, velocity, values_res))
                    else:
                        values = np.vstack((values_geom, depth, values_res))

                    resout.write_entire_frame(output_header, time, values)

    else:
        # Write a single frame with only variables from geometry
        global_mesh_constr.export_mesh(args.outfile_mesh, lang=args.lang)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
def mesh_mascaret_run(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    masc_geo = MascaretGeoFile(args.infile_geo)
    logger.info("Read %s " % masc_geo)
    # masc_geo.export_shp_lines(args.infile_geo.replace('.georef', '.shp'))
    if not masc_geo.has_ref:
        raise TatooineException(
            "The file `%s` does not contain any georeferenced data" %
            masc_geo.file_name)

    global_mesh_constr = MeshConstructor()

    for reach_id, reach in masc_geo.reaches.items():
        logger.info(reach)
        section_seq = CrossSectionSequence()

        dist_proj_axe = 0.0
        prev_x, prev_y = 0.0, 0.0
        for section_idx, masc_section in enumerate(reach):
            section = CrossSection(
                masc_section.id,
                [(x, y) for x, y in zip(masc_section.x, masc_section.y)],
                "Cross-section")

            section.coord.values = np.core.records.fromarrays(
                np.column_stack((masc_section.z, )).T,
                names=VARIABLES_FROM_GEOMETRY)
            x, y = masc_section.axis
            if section_idx != 0:
                dist_proj_axe += sqrt((x - prev_x)**2 + (y - prev_y)**2)

            section.dist_proj_axe = dist_proj_axe
            prev_x, prev_y = x, y

            section_seq.add_section(section)

        if len(section_seq) >= 2:
            section_seq.check_intersections()
            # section_seq.sort_by_dist() is useless because cross-sections are already sorted
            constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
                section_seq, args.interp_constraint_lines)

            mesh_constr = MeshConstructor(section_seq=section_seq,
                                          lat_step=args.lat_step,
                                          nb_pts_lat=args.nb_pts_lat,
                                          interp_values=args.interp_values)
            mesh_constr.build_interp(constraint_lines, args.long_step,
                                     args.constant_long_disc)
            mesh_constr.build_mesh(in_floworiented_crs=True)

            global_mesh_constr.append_mesh_constr(mesh_constr)
        else:
            logger.error(
                "/!\\ Reach %s ignored because it does not contain at least 2 sections"
                % reach_id)

    if len(global_mesh_constr.points) == 0:
        raise ExceptionCrue10("No node in the generated mesh!")

    logger.info(global_mesh_constr.summary())  # General information about the merged mesh

    if args.infile_res:
        masc_res = MascaretFile(args.infile_res)
        masc_res.get_reaches()
        nb_section_in_geom = masc_geo.nsections
        if masc_res.nsections != nb_section_in_geom:
            raise TatooineException(
                "The number of sections is different between geometry (%i) and results file (%i)"
                % (nb_section_in_geom, masc_res.nsections))

        varnames_1d = masc_res.varnames_dict['abbr']
        logger.info("Variables 1D available at sections: %s" % varnames_1d)
        try:
            pos_z = varnames_1d.index('Z')
        except ValueError:
            raise TatooineException(
                "The variable Z must be present in the results file")

        additional_variables_id = ['H']

        values_geom = global_mesh_constr.interp_values_from_geom()
        z_bottom = values_geom[0, :]
        with Serafin.Write(args.outfile_mesh, args.lang,
                           overwrite=True) as resout:
            title = '%s (written by TatooineMesher)' % os.path.basename(
                args.outfile_mesh)
            output_header = Serafin.SerafinHeader(title=title, lang=args.lang)
            output_header.from_triangulation(
                global_mesh_constr.triangle['vertices'],
                global_mesh_constr.triangle['triangles'] + 1)
            for var_name in VARIABLES_FROM_GEOMETRY:
                if var_name == 'B':
                    output_header.add_variable_from_ID(var_name)
                else:
                    output_header.add_variable_str(var_name, var_name, '')
            for var_id in additional_variables_id:
                output_header.add_variable_from_ID(var_id)
            for var_name in varnames_1d:
                output_header.add_variable_str(var_name, var_name, '')
            resout.write_header(output_header)

            for idx_time, time in enumerate(masc_res.times):
                variables_at_sections = masc_res.get_values(idx_time)[reach.id]

                # Interpolate between sections and set in casiers
                values_res = global_mesh_constr.interp_values_from_res(
                    variables_at_sections, None, pos_z)

                # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                depth = np.clip(values_res[pos_z, :] - z_bottom,
                                a_min=0.0,
                                a_max=None)

                values = np.vstack((values_geom, depth, values_res))
                resout.write_entire_frame(output_header, time, values)

    else:
        # Write a single frame with only variables from geometry
        global_mesh_constr.export_mesh(args.outfile_mesh, lang=args.lang)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
Example #19
    def from_file(filename, label, field_id=None, project_straight_line=False):
        section_seq = CrossSectionSequence()

        if filename.endswith('.i3s'):
            with bk.Read(filename) as in_i3s:
                in_i3s.read_header()
                for i, line in enumerate(in_i3s.get_open_polylines()):
                    line_id = i if field_id is None else line.attributes()[
                        0]  # Use `Value` if field is not None
                    z_array = np.array([(coord[2], )
                                        for coord in line.polyline().coords],
                                       dtype=float_vars('Z'))
                    line = line.to_2d()
                    section = CrossSection(line_id,
                                           list(line.polyline().coords), label)
                    section.coord.values = z_array
                    section_seq.add_section(section)

        elif filename.endswith('.shp'):
            shp_type = shp.get_shape_type(filename)
            if shp_type in (shapefile.POLYLINEZ, shapefile.POLYLINEM):
                field_id_index = get_field_index(filename, field_id)
                for i, line in enumerate(shp.get_open_polylines(filename)):
                    line_id = i if field_id is None else line.attributes(
                    )[field_id_index]
                    z_array = np.array([(coord[2], )
                                        for coord in line.polyline().coords],
                                       dtype=float_vars(['Z']))
                    line = line.to_2d()
                    section = CrossSection(line_id,
                                           list(line.polyline().coords), label)
                    section.coord.values = z_array
                    section_seq.add_section(section)

            elif shp_type in (shapefile.POINT, shapefile.POINTZ):
                field_id_index = get_field_index(filename, field_id)
                field_indexes, field_names = [], []
                if shp_type == shapefile.POINTZ:
                    field_names.append('Z')
                for index, name in shp.get_numeric_attribute_names(filename):
                    if name.startswith('Z'):
                        field_indexes.append(index)
                        field_names.append(name)
                coords, z_layers = [], []
                last_point_id = None
                for i, (point, attributes) in enumerate(
                        shp.get_points(filename,
                                       with_z=shp_type == shapefile.POINTZ)):
                    point_id = attributes[
                        field_id_index]  # FIXME: should raise exception if field_id_index is None!
                    if i > 0 and point_id != last_point_id:
                        z_array = np.array(z_layers,
                                           dtype=float_vars(field_names))
                        section = CrossSection(last_point_id, coords, label)
                        section.coord.values = z_array
                        section_seq.add_section(section)
                        coords, z_layers = [], []
                    coords.append(point[:2])
                    if shp_type == shapefile.POINTZ:
                        z_layers.append((point[2], ) +
                                        tuple(attributes[index]
                                              for index in field_indexes))
                    else:
                        z_layers.append(
                            tuple(attributes[index]
                                  for index in field_indexes))
                    last_point_id = point_id
                z_array = np.array(z_layers, dtype=float_vars(field_names))
                section = CrossSection(last_point_id, coords, label)
                section.coord.values = z_array
                section_seq.add_section(section)

            else:
                raise TatooineException(
                    "The type of file %s is not POINT[Z] or POLYLINEZ[M]" %
                    filename)

        else:
            raise NotImplementedError(
                "Only shp and i3s formats are supported for cross-sections")

        if project_straight_line:
            for section in section_seq:
                section.project_straight_line()
        return section_seq
Example #20
    def build_interp(self, constraint_lines, long_step, constant_long_disc):
        """
        Build interpolation, add points and segments

        @param constraint_lines <[ConstraintLine]>: list of constraint lines
        @param long_step <float>: longitudinal space step
        @param constant_long_disc <bool>: if True, use the same number of intermediate cross-sections for every bed of a zone
        """
        nb_pts_inter = 0
        self.build_initial_profiles()

        # LOOP ON ZONES (between 2 consecutive cross-sections)
        logger.info("~> Building mesh per zone and then per bed")

        for i, (prev_section, next_section) in enumerate(
                zip(self.section_seq, self.section_seq[1:])):
            logger.debug("> Zone n°{} : between {} and {}".format(
                i, prev_section, next_section))

            if constant_long_disc:
                nb_pts_inter = prev_section.compute_nb_pts_inter(
                    next_section, long_step)
                Xp_adm_list = np.linspace(0.0, 1.0, num=nb_pts_inter + 2)[1:-1]

            # Looking for common limits between cross-sections
            common_limits_id = prev_section.common_limits(
                next_section.limits.keys())
            logger.debug("Common limits: {}".format(list(common_limits_id)))

            if len(common_limits_id) < 2:
                raise TatooineException(
                    "No interpolation in the interval %i, between %s and %s (%i common limits)"
                    % (i, prev_section, next_section, len(common_limits_id)))

            else:
                first_bed = True
                # LOOP ON BEDS
                for j, (id1, id2) in enumerate(
                        zip(common_limits_id, common_limits_id[1:])):
                    pt_list_L1 = []
                    pt_list_L2 = []

                    logger.debug("Bed {}-{}".format(id1, id2))

                    # Extraction of cross-section portions (= beds)
                    bed_1 = prev_section.extract_bed(id1, id2)
                    bed_2 = next_section.extract_bed(id1, id2)

                    # Curvilinear abscissa along constraint lines
                    (Xp_profil1_L1,
                     Xp_profil1_L2) = prev_section.get_Xt_lines(id1, id2)
                    (Xp_profil2_L1,
                     Xp_profil2_L2) = next_section.get_Xt_lines(id1, id2)
                    dXp_L1 = Xp_profil2_L1 - Xp_profil1_L1
                    dXp_L2 = Xp_profil2_L2 - Xp_profil1_L2

                    if dXp_L1 < 0:
                        raise TatooineException(
                            "The constraint line {} is not oriented correctly".
                            format(id1))
                    if dXp_L2 < 0:
                        raise TatooineException(
                            "The constraint line {} is not oriented correctly".
                            format(id2))

                    if not constant_long_disc:
                        nb_pts_inter = math.ceil(
                            min(dXp_L1, dXp_L2) / long_step) - 1
                        Xp_adm_list = np.linspace(0.0,
                                                  1.0,
                                                  num=nb_pts_inter + 2)[1:-1]

                    L1_coord_int = constraint_lines[
                        id1].coord_sampling_along_line(Xp_profil1_L1,
                                                       Xp_profil2_L1,
                                                       Xp_adm_list)
                    L2_coord_int = constraint_lines[
                        id2].coord_sampling_along_line(Xp_profil1_L2,
                                                       Xp_profil2_L2,
                                                       Xp_adm_list)

                    # LOOP ON INTERMEDIATE CROSS-SECTIONS
                    for k in range(nb_pts_inter):
                        Xp = Xp_adm_list[k]
                        P1 = Point(tuple(L1_coord_int[k]))
                        P2 = Point(tuple(L2_coord_int[k]))

                        if self.nb_pts_lat is None:
                            nb_pts_lat = math.ceil(
                                P1.distance(P2) / self.lat_step) + 1
                        else:
                            nb_pts_lat = self.nb_pts_lat
                        array = bed_1.interp_coord_linear(
                            bed_2, Xp, nb_pts_lat)
                        bed_int = Bed(array, ['Xt', 'xt'])
                        bed_int.move_between_targets(P1, P2)
                        coord_int = bed_int.array[[
                            'X', 'Y', 'xt', 'Xt_upstream', 'Xt_downstream'
                        ]]  # Ignore `Xt`
                        pt_list_L1.append(self.i_pt + 1)

                        if not first_bed:
                            # ignore first point because the constraint line was already considered
                            coord_int = coord_int[1:]

                        self.add_points(coord_int, i, Xp, j)

                        pt_list_L2.append(self.i_pt)

                    pt_list_L2 = np.array(
                        [prev_section.get_limit_by_id(id2)['id_pt']] +
                        pt_list_L2 +
                        [next_section.get_limit_by_id(id2)['id_pt']])
                    self.add_segments_from_node_list(pt_list_L2)

                    if first_bed:
                        pt_list_L1 = np.array(
                            [prev_section.get_limit_by_id(id1)['id_pt']] +
                            pt_list_L1 +
                            [next_section.get_limit_by_id(id1)['id_pt']])
                        self.add_segments_from_node_list(pt_list_L1)
                        first_bed = False
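
# Worked example of the intermediate longitudinal discretization used above (the
# non-constant case): with the two constraint lines advancing by 48 m and 52 m between
# two cross-sections and a 10 m longitudinal step, 4 intermediate cross-sections are
# inserted at regular relative positions.
import math
import numpy as np

dXp_L1, dXp_L2, long_step = 48.0, 52.0, 10.0
nb_pts_inter = math.ceil(min(dXp_L1, dXp_L2) / long_step) - 1    # -> 4
Xp_adm_list = np.linspace(0.0, 1.0, num=nb_pts_inter + 2)[1:-1]  # -> [0.2, 0.4, 0.6, 0.8]
print(nb_pts_inter, Xp_adm_list)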