Example #1
    def run_single(self, input_name, input_header, output_name, output_header,
                   pool):
        i = 0
        try:
            with Serafin.Read(input_name,
                              input_header.language) as input_stream:
                input_stream.header = input_header
                input_stream.get_time()
                inv_nb_frames = 1 / len(input_stream.time)

                with Serafin.Write(output_name,
                                   input_header.language) as output_stream:
                    output_stream.write_header(output_header)

                    for time_value, value_array in pool.evaluate_expressions(
                            self.augmented_path, input_stream,
                            self.selected_expressions):
                        if self.canceled:
                            return
                        i += 1
                        output_stream.write_entire_frame(
                            output_header, time_value, value_array)

                        self.tick.emit(100 * i * self.inv_nb_files *
                                       inv_nb_frames)
                        QApplication.processEvents()
        except (Serafin.SerafinRequestError,
                Serafin.SerafinValidationError) as e:
            QMessageBox.critical(self, 'Serafin Error', e.message,
                                 QMessageBox.Ok)
            return
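A note on the progress arithmetic above: the tick is meant to be a percentage spread across all input files, i.e. 100 * i / (nb_files * nb_frames), which is why inv_nb_frames must hold the reciprocal of the frame count (and self.inv_nb_files, judging by its name, presumably holds 1 / number_of_files). A minimal sketch of the same computation with explicit counts; the function name and arguments are placeholders:

def overall_progress(frames_done, nb_frames, nb_files):
    # equivalent to 100 * i * inv_nb_files * inv_nb_frames in run_single()
    return 100.0 * frames_done / (nb_files * nb_frames)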
Example #2
    def _run_simple(self, input_data):
        """!
        @brief Write Serafin without any operator
        @param input_data <slf.datatypes.SerafinData>: input SerafinData stream
        """
        output_header = input_data.default_output_header()
        with Serafin.Read(input_data.filename,
                          input_data.language) as input_stream:
            input_stream.header = input_data.header
            input_stream.time = input_data.time
            with Serafin.Write(self.filename, input_data.language,
                               True) as output_stream:
                output_stream.write_header(output_header)
                for i, time_index in enumerate(
                        input_data.selected_time_indices):
                    values = do_calculations_in_frame(
                        input_data.equations,
                        input_stream,
                        time_index,
                        input_data.selected_vars,
                        output_header.np_float_type,
                        is_2d=output_header.is_2d,
                        us_equation=input_data.us_equation)
                    output_stream.write_entire_frame(
                        output_header, input_data.time[time_index], values)

                    self.progress_bar.setValue(
                        100 * (i + 1) / len(input_data.selected_time_indices))
                    QApplication.processEvents()
        self.success('Output saved to {}.'.format(self.filename))
        return True
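Most of the examples on this page follow the same read/copy/write skeleton that _run_simple uses: open the input with Serafin.Read, populate its header and time, copy the header, then write one frame at a time with Serafin.Write. A minimal standalone sketch of that pattern, assuming the same Serafin module as above; the import path and the file names are placeholders, so adjust them to the project layout:

import numpy as np
from pyteltools.slf import Serafin  # import path is an assumption

def copy_slf(in_name, out_name, lang='en'):
    with Serafin.Read(in_name, lang) as resin:
        resin.read_header()   # fills resin.header from the file
        resin.get_time()      # fills resin.time (one float per frame)
        output_header = resin.header.copy()
        with Serafin.Write(out_name, lang, overwrite=True) as resout:
            resout.write_header(output_header)
            values = np.empty((output_header.nb_var, output_header.nb_nodes),
                              dtype=output_header.np_float_type)
            for time_index, time in enumerate(resin.time):
                for i, var_ID in enumerate(output_header.var_IDs):
                    values[i, :] = resin.read_var_in_frame(time_index, var_ID)
                resout.write_entire_frame(output_header, time, values)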
Example #3
    def _run_max_min_mean(self, input_data):
        """!
        @brief Write Serafin with `Temporal Min/Max/Mean` operator
        @param input_data <slf.datatypes.SerafinData>: input SerafinData stream
        """
        selected = [(var, input_data.selected_vars_names[var][0],
                     input_data.selected_vars_names[var][1])
                    for var in input_data.selected_vars]
        scalars, vectors, additional_equations = operations.scalars_vectors(
            input_data.header.var_IDs, selected, input_data.us_equation)
        output_header = input_data.header.copy()
        output_header.set_variables(scalars + vectors)
        if input_data.to_single:
            output_header.to_single_precision()

        with Serafin.Read(input_data.filename,
                          input_data.language) as input_stream:
            input_stream.header = input_data.header
            input_stream.time = input_data.time
            has_scalar, has_vector = False, False
            scalar_calculator, vector_calculator = None, None
            if scalars:
                has_scalar = True
                scalar_calculator = operations.ScalarMaxMinMeanCalculator(
                    input_data.operator, input_stream, scalars,
                    input_data.selected_time_indices, additional_equations)
            if vectors:
                has_vector = True
                vector_calculator = operations.VectorMaxMinMeanCalculator(
                    input_data.operator, input_stream, vectors,
                    input_data.selected_time_indices, additional_equations)
            for i, time_index in enumerate(input_data.selected_time_indices):

                if has_scalar:
                    scalar_calculator.max_min_mean_in_frame(time_index)
                if has_vector:
                    vector_calculator.max_min_mean_in_frame(time_index)

                self.progress_bar.setValue(
                    100 * (i + 1) / len(input_data.selected_time_indices))
                QApplication.processEvents()

            if has_scalar and not has_vector:
                values = scalar_calculator.finishing_up()
            elif not has_scalar and has_vector:
                values = vector_calculator.finishing_up()
            else:
                values = np.vstack((scalar_calculator.finishing_up(),
                                    vector_calculator.finishing_up()))

            with Serafin.Write(self.filename, input_data.language,
                               True) as output_stream:
                output_stream.write_header(output_header)
                output_stream.write_entire_frame(output_header,
                                                 input_data.time[0], values)
        self.success('Output saved to {}.'.format(self.filename))
        return True
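The ScalarMaxMinMeanCalculator / VectorMaxMinMeanCalculator pair accumulates statistics frame by frame and hands back a single array (one row per variable, one value per node) from finishing_up(). For a single scalar variable, the same temporal maximum can be written directly with the lower-level calls used elsewhere on this page (compare the running np.maximum in Example #18); a hedged sketch, with var_ID and time_indices as placeholders:

import numpy as np

def temporal_max(input_stream, var_ID, time_indices):
    # running element-wise maximum over the selected frames
    out = input_stream.read_var_in_frame(time_indices[0], var_ID)
    for time_index in time_indices[1:]:
        out = np.maximum(out, input_stream.read_var_in_frame(time_index, var_ID))
    return out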
Example #4
    def btnSubmitEvent(self):
        canceled, filename = save_dialog('Serafin', self.data.filename)
        if canceled:
            return

        # fetch the list of selected variables
        selected_vars = self.inputTab.getSelectedVariables()

        # deduce header from selected variable IDs and write header
        output_header = self.getOutputHeader(selected_vars)

        # fetch the list of selected frames
        if self.timeSelection.manualSelection.hasData:
            output_time_indices = self.timeSelection.getManualTime()
            output_message = 'Writing the output with variables %s for %d frame%s between frame %d and %d.' \
                             % (str(output_header.var_IDs), len(output_time_indices),
                             ['', 's'][len(output_time_indices) > 1], output_time_indices[0]+1, output_time_indices[-1]+1)
        else:
            start_index, end_index, sampling_frequency, output_time_indices = self.timeSelection.getTime()
            output_message = 'Writing the output with variables %s between frame %d and %d with sampling frequency %d.' \
                             % (str(output_header.var_IDs), start_index, end_index, sampling_frequency)


        self.parent.inDialog()
        progressBar = OutputProgressDialog()

        # do some calculations
        try:
            with Serafin.Read(self.data.filename, self.data.language) as input_stream:
                # instead of re-reading the header and the time, just do a copy
                input_stream.header = self.data.header
                input_stream.time = self.data.time
                progressBar.setValue(5)
                QApplication.processEvents()

                with Serafin.Write(filename, self.data.language) as output_stream:
                    logging.info(output_message)

                    output_stream.write_header(output_header)

                    # do some additional computations
                    necessary_equations = get_necessary_equations(self.data.header.var_IDs, output_header.var_IDs,
                                                                  is_2d=output_header.is_2d,
                                                                  us_equation=self.data.us_equation)
                    process = ExtractVariablesThread(necessary_equations, self.data.us_equation, input_stream,
                                                     output_stream, output_header, output_time_indices)
                    progressBar.connectToThread(process)
                    process.run()

                    if not process.canceled:
                        progressBar.outputFinished()
                    progressBar.exec_()
                    self.parent.outDialog()
        except (Serafin.SerafinRequestError, Serafin.SerafinValidationError) as e:
            QMessageBox.critical(None, 'Serafin Error', e.message, QMessageBox.Ok, QMessageBox.Ok)
            return
Example #5
    def btnSubmitEvent(self):
        if not self.conditions:
            QMessageBox.critical(self, 'Error', 'Add at least one condition.',
                                 QMessageBox.Ok)
            return

        start_index = int(self.timeSelection.startIndex.text()) - 1
        end_index = int(self.timeSelection.endIndex.text())
        time_indices = list(range(start_index, end_index))

        if len(time_indices) == 1:
            QMessageBox.critical(self, 'Error', 'Start and end frame cannot be the same.',
                                 QMessageBox.Ok)
            return

        canceled, filename = save_dialog('Serafin', self.data.filename)
        if canceled:
            return

        # deduce header from selected variable IDs and write header
        output_header = self.getOutputHeader()
        output_message = 'Computing Arrival / Duration between frame %d and %d.' \
                          % (start_index+1, end_index)
        self.parent.inDialog()
        logging.info(output_message)
        progressBar = OutputProgressDialog()

        # do some calculations
        try:
            with Serafin.Read(self.data.filename, self.data.language) as input_stream:
                input_stream.header = self.data.header
                input_stream.time = self.data.time

                progressBar.setValue(5)
                QApplication.processEvents()

                with Serafin.Write(filename, self.data.language) as output_stream:
                    process = ArrivalDurationThread(input_stream, self.conditions, time_indices)
                    progressBar.connectToThread(process)
                    values = process.run()

                    if not process.canceled:
                        values = self._convertTimeUnit(time_indices, values)
                        output_stream.write_header(output_header)
                        output_stream.write_entire_frame(output_header, self.data.time[0], values)
                        progressBar.outputFinished()
        except (Serafin.SerafinRequestError, Serafin.SerafinValidationError) as e:
            QMessageBox.critical(None, 'Serafin Error', e.message, QMessageBox.Ok, QMessageBox.Ok)
            return

        progressBar.exec_()
        self.parent.outDialog()
Example #6
def slf_base(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        output_header = resin.header.copy()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])
        # Set mesh origin coordinates
        if args.set_mesh_origin:
            output_header.set_mesh_origin(args.set_mesh_origin[0], args.set_mesh_origin[1])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')

        # Remove variables if necessary
        if args.var2del:
            output_header.empty_variables()
            for var_ID, var_name, var_unit in zip(resin.header.var_IDs, resin.header.var_names, resin.header.var_units):
                if var_ID not in args.var2del:
                    output_header.add_variable(var_ID, var_name, var_unit)

        # Add new derived variables
        if args.var2add is not None:
            for var_ID in args.var2add:
                if var_ID in output_header.var_IDs:
                    logger.warn('Variable %s is already present (or already requested)' % var_ID)
                else:
                    output_header.add_variable_from_ID(var_ID)

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs, output_header.var_IDs,
                                                      is_2d=resin.header.is_2d, us_equation=us_equation)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            for time_index, time in tqdm(resin.subset_time(args.start, args.end, args.ech), unit='frame'):
                values = do_calculations_in_frame(necessary_equations, resin, time_index, output_header.var_IDs,
                                                  output_header.np_float_type, is_2d=output_header.is_2d,
                                                  us_equation=us_equation, ori_values={})
                resout.write_entire_frame(output_header, time, values)
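slf_base relies on get_necessary_equations to work out which equations are needed to derive the requested variables, and on do_calculations_in_frame to evaluate them for one frame. A hedged sketch of calling that pair outside the full copy loop, reusing the exact signatures above; the import paths, the file name and the choice of 'M' as a derivable variable are assumptions:

# import paths below are assumptions; match the project's own layout
from pyteltools.slf import Serafin
from pyteltools.slf.variables import get_necessary_equations, do_calculations_in_frame

with Serafin.Read('input.slf', 'en') as resin:
    resin.read_header()
    resin.get_time()
    wanted = ['M']  # e.g. scalar velocity magnitude, assuming U and V are in the input
    equations = get_necessary_equations(resin.header.var_IDs, wanted,
                                        is_2d=resin.header.is_2d, us_equation=None)
    # values has one row per requested variable and one column per node (first frame here)
    values = do_calculations_in_frame(equations, resin, 0, wanted,
                                      resin.header.np_float_type,
                                      is_2d=resin.header.is_2d,
                                      us_equation=None, ori_values={})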
Example #7
def landxml_to_slf(args):
    root = ET.parse(args.in_xml).getroot()
    PREFIX = '{http://www.landxml.org/schema/LandXML-1.2}'

    nodes = []  # list of (x, y) coordinates
    ikle = []  # list of triangle triplet (1-indexed)
    output_header = None
    with Serafin.Write(args.out_slf, args.lang,
                       overwrite=args.force) as resout:
        for i, surface in enumerate(root.find(PREFIX + 'Surfaces')):
            surface_name = surface.get('name')
            if ' ' in surface_name:
                varname = surface_name.split(' ')[0]
            else:
                varname = surface_name
            # time_duration = surface_name.split(' ')[-1]
            tin = surface.find(PREFIX + 'Definition')
            values = []
            for j, pnts in enumerate(tin.find(PREFIX + 'Pnts')):
                assert int(pnts.get('id')) == j + 1
                y, x, z = (float(n) for n in pnts.text.split())
                values.append(z)
                if i == 0:
                    nodes.append((x, y))
                else:
                    if (x, y) != nodes[j]:
                        raise RuntimeError(
                            "Coordinates are not strictly identical")

            for j, face in enumerate(tin.find(PREFIX + 'Faces')):
                if 'id' in face.attrib:
                    assert int(face.get('id')) == j + 1
                n1, n2, n3 = (int(n) for n in face.text.split())
                if i == 0:
                    ikle.append((n1, n2, n3))
                else:
                    if (n1, n2, n3) != ikle[j]:
                        raise RuntimeError("Mesh is not strictly identical")

            if i == 0:
                output_header = Serafin.SerafinHeader(
                    title='Converted from LandXML (written by PyTelTools)')
                output_header.from_triangulation(
                    np.array(nodes, dtype=np.float64),
                    np.array(ikle, dtype=np.int64))
                output_header.add_variable_str(varname, varname, '')
                resout.write_header(output_header)

            time = i * 3600.0  # FIXME: should convert time_duration to float
            resout.write_entire_frame(output_header, time,
                                      np.expand_dims(np.array(values), axis=0))
Example #8
    def btnSubmitEvent(self):
        # fetch the list of selected variables
        selected_vars = self._getSelectedVariables()
        if not selected_vars:
            QMessageBox.critical(self, 'Error', 'Select at least one variable.',
                                 QMessageBox.Ok)
            return

        canceled, filename = save_dialog('Serafin', self.data.filename)
        if canceled:
            return

        # deduce header from selected variable IDs and write header
        output_header = self.getOutputHeader(selected_vars)

        start_index = int(self.timeSelection.startIndex.text()) - 1
        end_index = int(self.timeSelection.endIndex.text())
        time_indices = list(range(start_index, end_index))
        var = self.varBox.currentText().split(' (')[0]

        output_message = 'Computing SynchMax of variables %s between frame %d and %d.' \
                          % (str(list(map(lambda x: x[0], selected_vars[1:]))), start_index+1, end_index)
        self.parent.inDialog()
        logging.info(output_message)
        progressBar = OutputProgressDialog()

        # do some calculations
        try:
            with Serafin.Read(self.data.filename, self.data.language) as input_stream:
                input_stream.header = self.data.header
                input_stream.time = self.data.time

                progressBar.setValue(5)
                QApplication.processEvents()

                with Serafin.Write(filename, self.data.language) as output_stream:
                    process = SynchMaxThread(input_stream, selected_vars[1:], time_indices, var)
                    progressBar.connectToThread(process)
                    values = process.run()

                    if not process.canceled:
                        output_stream.write_header(output_header)
                        output_stream.write_entire_frame(output_header, self.data.time[0], values)
                        progressBar.outputFinished()
        except (Serafin.SerafinRequestError, Serafin.SerafinValidationError) as e:
            QMessageBox.critical(None, 'Serafin Error', e.message, QMessageBox.Ok, QMessageBox.Ok)
            return

        progressBar.exec_()
        self.parent.outDialog()
Example #9
def bottom(inname, outname, i3s_names, overwrite, threshold):
    # global prev_line, zones, np_coord, Xt, Z, ref_rows, polyline
    with Serafin.Read(inname, 'fr') as resin:
        resin.read_header()

        if not resin.header.is_2d:
            sys.exit("This script only works with 2D meshes!")

        resin.get_time()

        # Define zones from polylines
        zones = []
        for i3s_name in i3s_names:
            zones += Zone.get_zones_from_i3s_file(i3s_name, threshold)

        with Serafin.Write(outname, 'fr', overwrite) as resout:
            output_header = resin.header
            resout.write_header(output_header)
            posB = output_header.var_IDs.index('B')

            for time_index, time in enumerate(resin.time):
                var = np.empty((output_header.nb_var, output_header.nb_nodes),
                               dtype=output_header.np_float_type)
                for i, var_ID in enumerate(output_header.var_IDs):
                    var[i, :] = resin.read_var_in_frame(time_index, var_ID)

                # Replace bottom locally
                nmodif = 0
                for i in range(
                        output_header.nb_nodes):  # iterate over all nodes
                    x, y = output_header.x[i], output_header.y[i]
                    pt = geo.Point(x, y)

                    for j, zone in enumerate(zones):
                        if zone.contains(pt):
                            # Current point is inside zone number j
                            #   and is between polylines a and b
                            print("node {} is in zone n°{}".format(i + 1, j))

                            # Replace value by a linear interpolation
                            z_int = zone.interpolate(pt)
                            print(z_int)
                            var[posB, i] = min(var[posB, i], z_int)

                            nmodif += 1
                            break

                resout.write_entire_frame(output_header, time, var)
                print("{} nodes were overwritten".format(nmodif))
Example #10
    def _run_synch_max(self, input_data):
        """!
        @brief Write Serafin with `SynchMax` operator
        @param input_data <slf.datatypes.SerafinData>: input SerafinData stream
        """
        selected_vars = [
            var for var in input_data.selected_vars
            if var in input_data.header.var_IDs
        ]
        output_header = input_data.header.copy()
        output_header.empty_variables()
        for var_ID in selected_vars:
            var_name, var_unit = input_data.selected_vars_names[var_ID]
            output_header.add_variable(var_ID, var_name, var_unit)
        if input_data.to_single:
            output_header.to_single_precision()

        nb_frames = len(input_data.selected_time_indices)
        with Serafin.Read(input_data.filename,
                          input_data.language) as input_stream:
            input_stream.header = input_data.header
            input_stream.time = input_data.time

            calculator = operations.SynchMaxCalculator(
                input_stream, selected_vars, input_data.selected_time_indices,
                input_data.metadata['var'])

            for i, time_index in enumerate(
                    input_data.selected_time_indices[1:]):
                calculator.synch_max_in_frame(time_index)

                self.progress_bar.setValue(100 * (i + 1) / nb_frames)
                QApplication.processEvents()

            values = calculator.finishing_up()
            with Serafin.Write(self.filename, input_data.language,
                               True) as output_stream:
                output_stream.write_header(output_header)
                output_stream.write_entire_frame(output_header,
                                                 input_data.time[0], values)
        self.success('Output saved to {}.'.format(self.filename))
        return True
Example #11
 def _run_layer_selection(self, input_data):
     """!
     @brief Write Serafin with `Select Single Layer` operator
     @param input_data <slf.datatypes.SerafinData>: input SerafinData stream
     """
     output_header = input_data.build_2d_output_header()
     with Serafin.Read(input_data.filename,
                       input_data.language) as input_stream:
         input_stream.header = input_data.header
         input_stream.time = input_data.time
         with Serafin.Write(self.filename, input_data.language,
                            True) as output_stream:
             output_stream.write_header(output_header)
             for i, time_index in enumerate(
                     input_data.selected_time_indices):
                 # FIXME Optimization: Do calculations only on target layer and avoid reshaping afterwards
                 values = do_calculations_in_frame(
                     input_data.equations,
                     input_stream,
                     time_index,
                     input_data.selected_vars,
                     output_header.np_float_type,
                     is_2d=output_header.is_2d,
                     us_equation=input_data.us_equation)
                 new_shape = (values.shape[0],
                              input_stream.header.nb_planes,
                              values.shape[1] //
                              input_stream.header.nb_planes)
                 values_at_layer = values.reshape(
                     new_shape)[:, input_data.metadata['layer_selection'] -
                                1, :]
                 output_stream.write_entire_frame(
                     output_header, input_data.time[time_index],
                     values_at_layer)
                 self.progress_bar.setValue(
                     100 * (i + 1) / len(input_data.selected_time_indices))
                 QApplication.processEvents()
     self.success('Output saved to {}.'.format(self.filename))
     return True
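The reshape in _run_layer_selection assumes a 3D frame is stored as nb_var rows of nb_planes * nb_nodes_2d values, plane by plane, so extracting one layer is a pure indexing operation. A shape-only sketch with made-up sizes (all numbers below are hypothetical):

import numpy as np

nb_var, nb_planes, nb_nodes_2d = 2, 5, 100              # hypothetical sizes
values = np.zeros((nb_var, nb_planes * nb_nodes_2d))    # one full 3D frame
layer = 3                                               # 1-indexed, like metadata['layer_selection']
values_at_layer = values.reshape((nb_var, nb_planes, nb_nodes_2d))[:, layer - 1, :]
assert values_at_layer.shape == (nb_var, nb_nodes_2d)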
Example #12
def slf_last(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        output_header = resin.header.copy()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh(
                [Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn(
                    'Input file is already single precision! Argument `--to_single_precision` is ignored'
                )

        values = np.empty((output_header.nb_var, output_header.nb_nodes),
                          dtype=output_header.np_float_type)
        with Serafin.Write(args.out_slf, args.lang,
                           overwrite=args.force) as resout:
            resout.write_header(output_header)

            time_index = len(resin.time) - 1
            time = resin.time[-1] if args.time is None else args.time

            for i, var_ID in enumerate(output_header.var_IDs):
                values[i, :] = resin.read_var_in_frame(time_index, var_ID)

            resout.write_entire_frame(output_header, time, values)
Example #13
def slf_sedi_chain(args):
    # Check that float parameters are positive (especially ws!)
    for arg in ('Cmud', 'ws', 'C', 'M'):
        if getattr(args, arg) < 0:
            logger.critical('The argument %s has to be positive' % arg)
            sys.exit(1)

    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs,
                                                      ['TAU'],
                                                      is_2d=True,
                                                      us_equation=us_equation)

        if resin.header.nb_frames < 1:
            logger.critical('The input file must have at least one frame!')
            sys.exit(1)

        output_header = resin.header.copy()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh(
                [Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn(
                    'Input file is already single precision! Argument `--to_single_precision` is ignored'
                )

        output_header.empty_variables()
        output_header.add_variable_from_ID('B')
        output_header.add_variable_from_ID('EV')

        with Serafin.Write(args.out_slf, args.lang,
                           overwrite=args.force) as resout:
            resout.write_header(output_header)

            prev_time = None
            prev_tau = None
            initial_bottom = resin.read_var_in_frame(0, 'B')
            bottom = copy(initial_bottom)
            for time_index, time in enumerate(resin.time):
                tau = do_calculations_in_frame(necessary_equations,
                                               resin,
                                               time_index, ['TAU'],
                                               output_header.np_float_type,
                                               is_2d=True,
                                               us_equation=us_equation,
                                               ori_values={})[0]
                if prev_time is not None:
                    dt = time - prev_time
                    mean_tau = (prev_tau + tau) / 2
                    if args.Tcd > 0:
                        bottom += args.Cmud * args.ws * args.C * \
                                  (1 - np.clip(mean_tau/args.Tcd, a_min=None, a_max=1.)) * dt
                    if args.Tce > 0:
                        bottom -= args.Cmud * args.M * \
                                  (np.clip(mean_tau/args.Tce, a_min=1., a_max=None) - 1.) * dt

                evol_bottom = bottom - initial_bottom
                resout.write_entire_frame(output_header, time,
                                          np.vstack((bottom, evol_bottom)))

                prev_time = time
                prev_tau = tau
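The bed update in slf_sedi_chain is a simple explicit scheme: between two frames it deposits mud while the mean shear stress stays below Tcd and erodes it when the stress exceeds Tce, which mirrors the structure of the classical Krone / Partheniades laws. A scalar sketch of one time step, using only the argparse options that appear above (parameter names unchanged, the function itself is illustrative):

def bed_increment(tau, dt, Cmud, ws, C, M, Tcd, Tce):
    db = 0.0
    if Tcd > 0:
        # deposition, linearly reduced to zero as tau approaches Tcd
        db += Cmud * ws * C * max(1.0 - tau / Tcd, 0.0) * dt
    if Tce > 0:
        # erosion, active only when tau exceeds Tce
        db -= Cmud * M * max(tau / Tce - 1.0, 0.0) * dt
    return db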
Example #14
    def btnSubmitEvent(self):
        # fetch the list of selected variables
        selected_vars = self._getSelectedVariables()
        if not selected_vars:
            QMessageBox.critical(self, 'Error', 'Select at least one variable.',
                                 QMessageBox.Ok)
            return

        canceled, filename = save_dialog('Serafin', self.data.filename)
        if canceled:
            return

        # separate scalars and vectors
        scalars, vectors, additional_equations = operations.scalars_vectors(self.data.header.var_IDs, selected_vars)

        # get the operation type
        if self.maxButton.isChecked():
            max_min_type = operations.MAX
        elif self.minButton.isChecked():
            max_min_type = operations.MIN
        else:
            max_min_type = operations.MEAN

        # deduce header from selected variable IDs and write header
        output_header = self.getOutputHeader(scalars, vectors)

        start_index = int(self.timeSelection.startIndex.text()) - 1
        end_index = int(self.timeSelection.endIndex.text())
        time_indices = list(range(start_index, end_index))

        output_message = 'Computing %s of variables %s between frame %d and %d.' \
                          % ('Max' if self.maxButton.isChecked() else ('Min' if self.minButton.isChecked() else 'Mean'),
                             str(output_header.var_IDs), start_index+1, end_index)
        self.parent.inDialog()
        logging.info(output_message)
        progressBar = OutputProgressDialog()

        # do some calculations
        try:
            with Serafin.Read(self.data.filename, self.data.language) as input_stream:
                input_stream.header = self.data.header
                input_stream.time = self.data.time

                progressBar.setValue(5)
                QApplication.processEvents()

                with Serafin.Write(filename, self.data.language) as output_stream:
                    process = MaxMinMeanThread(max_min_type, input_stream, scalars, vectors, time_indices,
                                               additional_equations)
                    progressBar.connectToThread(process)
                    values = process.run()

                    if not process.canceled:
                        output_stream.write_header(output_header)
                        output_stream.write_entire_frame(output_header, self.data.time[0], values)
                        progressBar.outputFinished()
        except (Serafin.SerafinRequestError, Serafin.SerafinValidationError) as e:
            QMessageBox.critical(None, 'Serafin Error', e.message, QMessageBox.Ok, QMessageBox.Ok)
            return

        progressBar.exec_()
        self.parent.outDialog()
Example #15
    def _run_project_mesh(self, first_input):
        """!
        @brief Write Serafin with `Project Mesh` operator
        @param first_input <slf.datatypes.SerafinData>: first input SerafinData stream
        """
        operation_type = first_input.operator
        second_input = first_input.metadata['operand']

        if second_input.filename == self.filename:
            self.fail('cannot overwrite the input file.')
            return

        # common vars
        first_vars = [
            var for var in first_input.header.var_IDs
            if var in first_input.selected_vars
        ]
        second_vars = [
            var for var in second_input.header.var_IDs
            if var in second_input.selected_vars
        ]
        common_vars = []
        for var in first_vars:
            if var in second_vars:
                common_vars.append(var)
        if not common_vars:
            self.fail('the two input files do not share common variables.')
            return False

        # common frames
        first_frames = [
            first_input.start_time + first_input.time_second[i]
            for i in first_input.selected_time_indices
        ]
        second_frames = [
            second_input.start_time + second_input.time_second[i]
            for i in second_input.selected_time_indices
        ]
        common_frames = []
        for first_index, first_frame in zip(first_input.selected_time_indices,
                                            first_frames):
            for second_index, second_frame in zip(
                    second_input.selected_time_indices, second_frames):
                if first_frame == second_frame:
                    common_frames.append((first_index, second_index))
        if not common_frames:
            self.fail('the two input files do not share common time frames.')
            return False

        # construct output header
        output_header = first_input.header.copy()
        output_header.empty_variables()
        for var in common_vars:
            name, unit = first_input.selected_vars_names[var]
            output_header.add_variable(var, name, unit)
        if first_input.to_single:
            output_header.to_single_precision()

        # map points of A onto mesh B
        mesh = MeshInterpolator(second_input.header, False)

        if second_input.triangles:
            mesh.index = second_input.index
            mesh.triangles = second_input.triangles
        else:
            self.construct_mesh(mesh)
            second_input.index = mesh.index
            second_input.triangles = mesh.triangles

        is_inside, point_interpolators = mesh.get_point_interpolators(
            list(zip(first_input.header.x, first_input.header.y)))

        # run the calculator
        with Serafin.Read(first_input.filename,
                          first_input.language) as first_in:
            first_in.header = first_input.header
            first_in.time = first_input.time

            with Serafin.Read(second_input.filename,
                              second_input.language) as second_in:
                second_in.header = second_input.header
                second_in.time = second_input.time

                calculator = operations.ProjectMeshCalculator(
                    first_in, second_in, common_vars, is_inside,
                    point_interpolators, common_frames, operation_type)

                with Serafin.Write(self.filename, first_input.language,
                                   True) as out_stream:
                    out_stream.write_header(output_header)

                    for i, (first_time_index, second_time_index) in enumerate(
                            calculator.time_indices):
                        values = calculator.operation_in_frame(
                            first_time_index, second_time_index)
                        out_stream.write_entire_frame(
                            output_header,
                            calculator.first_in.time[first_time_index], values)

                        self.progress_bar.setValue(100 * (i + 1) /
                                                   len(common_frames))
                        QApplication.processEvents()

        self.success(
            'Output saved to {}.\nThe two files have {} common variables and {} common frames.\n'
            'Mesh A has {} / {} nodes inside mesh B.'.format(
                self.filename, len(common_vars), len(common_frames),
                sum(is_inside), first_input.header.nb_nodes))
        return True
Example #16
    def _run_arrival_duration(self, input_data):
        """!
        @brief Write Serafin with `Compute Arrival Duration` operator
        @param input_data <slf.datatypes.SerafinData>: input SerafinData stream
        """
        conditions, table, time_unit = input_data.metadata['conditions'], \
                                       input_data.metadata['table'], input_data.metadata['time unit']

        output_header = input_data.header.copy()
        output_header.empty_variables()
        for row in range(len(table)):
            a_name = table[row][1]
            d_name = table[row][2]
            for name in [a_name, d_name]:
                output_header.add_variable_str('', name, time_unit.upper())
        if input_data.to_single:
            output_header.to_single_precision()

        with Serafin.Read(input_data.filename,
                          input_data.language) as input_stream:
            input_stream.header = input_data.header
            input_stream.time = input_data.time
            calculators = []

            for i, condition in enumerate(conditions):
                calculators.append(
                    operations.ArrivalDurationCalculator(
                        input_stream, input_data.selected_time_indices,
                        condition))
            for i, index in enumerate(input_data.selected_time_indices[1:]):
                for calculator in calculators:
                    calculator.arrival_duration_in_frame(index)

                self.progress_bar.setValue(
                    100 * (i + 1) / len(input_data.selected_time_indices))
                QApplication.processEvents()

            values = np.empty(
                (2 * len(conditions), input_data.header.nb_nodes))
            for i, calculator in enumerate(calculators):
                values[2 * i, :] = calculator.arrival
                values[2 * i + 1, :] = calculator.duration

            if time_unit == 'minute':
                values /= 60
            elif time_unit == 'hour':
                values /= 3600
            elif time_unit == 'day':
                values /= 86400
            elif time_unit == 'percentage':
                values *= 100 / (
                    input_data.time[input_data.selected_time_indices[-1]] -
                    input_data.time[input_data.selected_time_indices[0]])

            with Serafin.Write(self.filename, input_data.language,
                               True) as output_stream:
                output_stream.write_header(output_header)
                output_stream.write_entire_frame(output_header,
                                                 input_data.time[0], values)
        self.success('Output saved to {}.'.format(self.filename))
        return True
Example #17
    def export_mesh(self, path, lang='en'):
        """
        Export generated mesh in slf, t3s or LandXML
        TODO: export multiple variables in t3s and LandXML
        """
        logger.info("~> Write generated mesh")

        nnode, nelem = len(self.triangle['vertices']), len(
            self.triangle['triangles'])
        if path.endswith('.t3s'):
            with open(path, 'w', newline='') as fileout:
                # Write header
                date = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
                fileout.write(
                    """#########################################################################
:FileType t3s  ASCII  EnSim 1.0
# Canadian Hydraulics Centre/National Research Council (c) 1998-2012
# DataType                 2D T3 Scalar Mesh
#
:Application              BlueKenue
:Version                  3.3.4
:WrittenBy                TatooineMesher
:CreationDate             {}
#
#------------------------------------------------------------------------
#
:NodeCount {}
:ElementCount {}
:ElementType  T3
#
:EndHeader
""".format(date, nnode, nelem))

            with open(path, mode='ab') as fileout:
                # Table with x, y, z coordinates
                np.savetxt(fileout,
                           np.column_stack(
                               (self.triangle['vertices'],
                                self.interp_values_from_geom()[0, :])),
                           delimiter=' ',
                           fmt='%.{}f'.format(DIGITS))

                # Table with elements (connectivity)
                np.savetxt(fileout,
                           self.triangle['triangles'] + 1,
                           delimiter=' ',
                           fmt='%i')

        elif path.endswith('.xml'):
            env = Environment(loader=FileSystemLoader(
                os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'data')))
            template = env.get_template("LandXML_template.xml")
            template_render = template.render(
                nodes=np.round(
                    np.column_stack((self.triangle['vertices'],
                                     self.interp_values_from_geom()[0, :])),
                    DIGITS),
                ikle=self.triangle['triangles'] + 1)

            # Write XML file
            with open(path, 'w') as fileout:
                fileout.write(template_render)

        elif path.endswith('.slf'):

            with Serafin.Write(path, lang, overwrite=True) as resout:
                output_header = Serafin.SerafinHeader(
                    title='%s (Written by TatooineMesher)' %
                    os.path.basename(path),
                    lang=lang)
                output_header.from_triangulation(
                    self.triangle['vertices'], self.triangle['triangles'] + 1)

                for var in self.var_names():
                    if var in basic_2D_vars_IDs:
                        output_header.add_variable_from_ID(var)
                    else:
                        output_header.add_variable_str(var, var, '')
                resout.write_header(output_header)

                resout.write_entire_frame(output_header, 0.0,
                                          self.interp_values_from_geom())

        else:
            raise NotImplementedError(
                "Only slf, t3s and xml formats are supported for the output mesh"
            )
Example #18
def slf_max_over_files(args):
    if args.vars is None:
        with Serafin.Read(args.in_slfs[0], args.lang) as resin:
            resin.read_header()
            var_IDs = resin.header.var_IDs
    else:
        var_IDs = args.vars

    if args.operation == 'max':
        fun = np.maximum
    elif args.operation == 'min':
        fun = np.minimum
    else:
        raise NotImplementedError

    # Read polygons
    if args.in_polygons is not None:
        if not args.in_polygons.endswith('.shp'):
            logger.critical('File "%s" is not a shp file.' % args.in_polygons)
            sys.exit(3)
        polygons = []
        try:
            for polygon in Shapefile.get_polygons(args.in_polygons):
                polygons.append(polygon)
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)

        if not polygons:
            logger.error('The file does not contain any polygon.')
            sys.exit(1)
        logger.info('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))
    else:
        polygons = None

    output_header = None
    out_values = None  # min or max values
    mask_nodes = None
    for i, in_slf in enumerate(args.in_slfs):
        with Serafin.Read(in_slf, args.lang) as resin:
            resin.read_header()
            logger.info(resin.header.summary())
            if not resin.header.is_2d:
                logger.critical('The file has to be a 2D Serafin!')
                sys.exit(3)
            resin.get_time()

            for var_ID in var_IDs:
                if var_ID not in resin.header.var_IDs:
                    logger.critical('The variable %s is missing in %s' % (var_ID, in_slf))
                    sys.exit(3)

            if i == 0:
                output_header = resin.header.copy()
                output_header.empty_variables()
                for var_ID in var_IDs:
                    output_header.add_variable_from_ID(var_ID)
                out_values = np.empty((output_header.nb_var, output_header.nb_nodes),
                                      dtype=output_header.np_float_type)
                if polygons is not None:
                    mask_nodes = np.zeros(output_header.nb_nodes, dtype=bool)
                    for idx_node, (x, y) in enumerate(zip(output_header.x, output_header.y)):
                        point = Point(x, y)
                        for polygon in polygons:
                            if polygon.contains(point):
                                mask_nodes[idx_node] = True
                                break
                    logger.info('Number of nodes inside polygon(s): %i (over %i)'
                                % (mask_nodes.sum(), output_header.nb_nodes))
                else:
                    mask_nodes = np.ones(output_header.nb_nodes, dtype=bool)
            else:
                if not resin.header.same_2d_mesh(output_header):
                    logger.critical('The mesh of %s is different from the first one' % in_slf)
                    sys.exit(1)

            for time_index, time in enumerate(resin.time):
                for j, var_ID in enumerate(var_IDs):
                    values = resin.read_var_in_frame(time_index, var_ID)
                    if time_index == 0 and i == 0:
                        out_values[j, :] = values
                    else:
                        out_values[j, mask_nodes] = fun(out_values[j, mask_nodes], values[mask_nodes])

    with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
        resout.write_header(output_header)
        resout.write_entire_frame(output_header, 0.0, out_values)
Example #19
def mesh_mascaret_run(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    masc_geo = MascaretGeoFile(args.infile_geo)
    logger.info("Read %s " % masc_geo)
    # masc_geo.export_shp_lines(args.infile_geo.replace('.georef', '.shp'))
    if not masc_geo.has_ref:
        raise TatooineException(
            "The file `%s` does not contain any georeferenced data" %
            masc_geo.file_name)

    global_mesh_constr = MeshConstructor()

    for reach_id, reach in masc_geo.reaches.items():
        logger.info(reach)
        section_seq = CrossSectionSequence()

        dist_proj_axe = 0.0
        prev_x, prev_y = 0.0, 0.0
        for section_idx, masc_section in enumerate(reach):
            section = CrossSection(
                masc_section.id,
                [(x, y) for x, y in zip(masc_section.x, masc_section.y)],
                "Cross-section")

            section.coord.values = np.core.records.fromarrays(
                np.column_stack((masc_section.z, )).T,
                names=VARIABLES_FROM_GEOMETRY)
            x, y = masc_section.axis
            if section_idx != 0:
                dist_proj_axe += sqrt((x - prev_x)**2 + (y - prev_y)**2)

            section.dist_proj_axe = dist_proj_axe
            prev_x, prev_y = x, y

            section_seq.add_section(section)

        if len(section_seq) >= 2:
            section_seq.check_intersections()
            # section_seq.sort_by_dist() is useless because cross-sections are already sorted
            constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
                section_seq, args.interp_constraint_lines)

            mesh_constr = MeshConstructor(section_seq=section_seq,
                                          lat_step=args.lat_step,
                                          nb_pts_lat=args.nb_pts_lat,
                                          interp_values=args.interp_values)
            mesh_constr.build_interp(constraint_lines, args.long_step,
                                     args.constant_long_disc)
            mesh_constr.build_mesh(in_floworiented_crs=True)

            global_mesh_constr.append_mesh_constr(mesh_constr)
        else:
            logger.error(
                "/!\\ Reach %s ignored because it does not contain at least 2 sections"
                % reach_id)

    if len(global_mesh_constr.points) == 0:
        raise ExceptionCrue10("No node in the generated mesh!")

    logger.info(global_mesh_constr.summary())  # General information about the merged mesh

    if args.infile_res:
        masc_res = MascaretFile(args.infile_res)
        masc_res.get_reaches()
        nb_section_in_geom = masc_geo.nsections
        if masc_res.nsections != nb_section_in_geom:
            raise TatooineException(
                "The number of sections is different between geometry (%i) and results file (%i)"
                % (nb_section_in_geom, masc_res.nsections))

        varnames_1d = masc_res.varnames_dict['abbr']
        logger.info("Variables 1D available at sections: %s" % varnames_1d)
        try:
            pos_z = varnames_1d.index('Z')
        except ValueError:
            raise TatooineException(
                "The variable Z must be present in the results file")

        additional_variables_id = ['H']

        values_geom = global_mesh_constr.interp_values_from_geom()
        z_bottom = values_geom[0, :]
        with Serafin.Write(args.outfile_mesh, args.lang,
                           overwrite=True) as resout:
            title = '%s (written by TatooineMesher)' % os.path.basename(
                args.outfile_mesh)
            output_header = Serafin.SerafinHeader(title=title, lang=args.lang)
            output_header.from_triangulation(
                global_mesh_constr.triangle['vertices'],
                global_mesh_constr.triangle['triangles'] + 1)
            for var_name in VARIABLES_FROM_GEOMETRY:
                if var_name == 'B':
                    output_header.add_variable_from_ID(var_name)
                else:
                    output_header.add_variable_str(var_name, var_name, '')
            for var_id in additional_variables_id:
                output_header.add_variable_from_ID(var_id)
            for var_name in varnames_1d:
                output_header.add_variable_str(var_name, var_name, '')
            resout.write_header(output_header)

            for idx_time, time in enumerate(masc_res.times):
                variables_at_sections = masc_res.get_values(idx_time)[reach.id]

                # Interpolate between sections and set in casiers
                values_res = global_mesh_constr.interp_values_from_res(
                    variables_at_sections, None, pos_z)

                # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                depth = np.clip(values_res[pos_z, :] - z_bottom,
                                a_min=0.0,
                                a_max=None)

                values = np.vstack((values_geom, depth, values_res))
                resout.write_entire_frame(output_header, time, values)

    else:
        # Write a single frame with only variables from geometry
        global_mesh_constr.export_mesh(args.outfile_mesh, lang=args.lang)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
Example #20
def bottom(args):
    if args.operations is None:
        args.operations = ['set'] * len(args.in_i3s_paths)
    if len(args.in_i3s_paths) != len(args.operations):
        raise RuntimeError('The number of operations must match the number of i3s paths')

    # global prev_line, zones, np_coord, Xt, Z, ref_rows, polyline
    with Serafin.Read(args.in_slf, 'fr') as resin:
        resin.read_header()

        if not resin.header.is_2d:
            sys.exit("This script only works with 2D meshes!")

        resin.get_time()

        # Define zones from polylines
        zones = []
        for i3s_path, operator_str in zip(args.in_i3s_paths, args.operations):
            zones += Zone.get_zones_from_i3s_file(i3s_path, args.threshold, operator_str)

        with Serafin.Write(args.out_slf, 'fr', args.force) as resout:
            output_header = resin.header
            resout.write_header(output_header)
            pos_B = output_header.var_IDs.index('B')

            for time_index, time in enumerate(resin.time):
                var = np.empty((output_header.nb_var, output_header.nb_nodes), dtype=output_header.np_float_type)
                for i, var_ID in enumerate(output_header.var_IDs):
                    var[i, :] = resin.read_var_in_frame(time_index, var_ID)

                # Replace bottom locally
                nmodif = 0
                for i in range(output_header.nb_nodes):  # iterate over all nodes
                    x, y = output_header.x[i], output_header.y[i]
                    pt = geo.Point(x, y)
                    old_z = var[pos_B, i]

                    found = False
                    # Check if it is inside a zone
                    for j, zone in enumerate(zones):
                        if zone.contains(pt):
                            # Current point is inside zone number j and is between polylines a and b
                            z_int = zone.interpolate(pt)
                            new_z = zone.operator(z_int, old_z)
                            var[pos_B, i] = new_z

                            print("BOTTOM at node {} (zone n°{}) {} to {} (dz={})".format(
                                i + 1, j, operator_str, new_z, new_z - old_z
                            ))

                            nmodif += 1
                            found = True
                            break

                    if not found and args.rescue_distance > 0.0:
                        # Try to rescue some very close nodes
                        for j, zone in enumerate(zones):
                            if zone.polygon.distance(pt) < args.rescue_distance:
                                pt_projected = zone.get_closest_point(pt)

                                # Replace value by a linear interpolation
                                z_int = zone.interpolate(pt_projected)
                                new_z = zone.operator(z_int, old_z)
                                var[pos_B, i] = new_z

                                print("BOTTOM at node {} (zone n°{}, rescued) {} to {} (dz={})".format(
                                    i + 1, j, operator_str, new_z, new_z - old_z
                                ))

                                nmodif += 1
                                break

                resout.write_entire_frame(output_header, time, var)
                print("{} nodes were overwritten".format(nmodif))
Example #21
def slf_3d_to_2d(args):
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if resin.header.is_2d:
            logger.critical('The input file is not 3D.')
            sys.exit(1)
        if 'Z' not in resin.header.var_IDs:
            logger.critical('The elevation variable Z is not found in the Serafin file.')
            sys.exit(1)
        if args.layer is not None:
            upper_plane = resin.header.nb_planes
            if args.layer < 1 or args.layer > upper_plane:
                logger.critical('Layer has to be in [1, %i]' % upper_plane)
                sys.exit(1)

        output_header = resin.header.copy_as_2d()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')

        if args.aggregation is not None:
            if args.aggregation == 'max':
                operation_type = operations.MAX
            elif args.aggregation == 'min':
                operation_type = operations.MIN
            else:  # args.aggregation == 'mean'
                operation_type = operations.MEAN
            selected_vars = [var for var in output_header.iter_on_all_variables()]
            vertical_calculator = operations.VerticalMaxMinMeanCalculator(operation_type, resin, output_header,
                                                                          selected_vars, args.vars)
            output_header.set_variables(vertical_calculator.get_variables())  # sort variables

        # Add some elevation variables
        for var_ID in args.vars:
            output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            vars_2d = np.empty((output_header.nb_var, output_header.nb_nodes_2d), dtype=output_header.np_float_type)
            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                if args.aggregation is not None:
                    vars_2d = vertical_calculator.max_min_mean_in_frame(time_index)
                else:
                    for i, var in enumerate(output_header.var_IDs):
                        vars_2d[i, :] = resin.read_var_in_frame_as_3d(time_index, var)[args.layer - 1, :]
                resout.write_entire_frame(output_header, time, vars_2d)
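slf_3d_to_2d only consumes an argparse-style namespace, so it can also be driven directly from Python. A hedged invocation sketch, with placeholder paths and values; the attribute names mirror the args fields read by the function above:

from argparse import Namespace

# Placeholder values; attribute names mirror the args fields used by slf_3d_to_2d.
args = Namespace(
    in_slf='model_3d.slf', out_slf='model_2d.slf', lang='en',
    layer=None,              # extract a single plane (1-based index) ...
    aggregation='max',       # ... or aggregate vertically: 'max', 'min' or 'mean'
    vars=['B'],              # additional elevation variables to add to the output
    shift=None,              # optional (x, y) mesh translation
    toggle_endianness=False, to_single_precision=False,
    force=True,              # overwrite the output file if it exists
)
slf_3d_to_2d(args)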
Example #22
def slf_bottom_friction(args):
    # Check argument consistency
    if args.in_strickler_zones is not None or args.in_strickler_attr is not None:
        if args.in_strickler_zones is None or args.in_strickler_attr is None:
            logger.critical(
                'Both arguments `--in_strickler_zones` and `--in_strickler_attr` have to be defined.'
            )
            sys.exit(2)

    # Read polygons to compute volume
    if not args.in_polygons.endswith('.shp'):
        logger.critical('File "%s" is not a shp file.' % args.in_polygons)
        sys.exit(3)
    polygons = []
    try:
        for polygon in Shapefile.get_polygons(args.in_polygons):
            polygons.append(polygon)
    except ShapefileException as e:
        logger.error(e)
        sys.exit(3)

    if not polygons:
        logger.error('The file does not contain any polygon.')
        sys.exit(1)
    logger.debug('The file contains {} polygon{}.'.format(
        len(polygons), 's' if len(polygons) > 1 else ''))

    names = ['Polygon %d' % (i + 1) for i in range(len(polygons))]

    varIDs = ['US', 'TAU']
    out_varIDs = ['W'] + varIDs
    pos_TAU = out_varIDs.index('TAU')
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        in_varIDs = resin.header.var_IDs

        # Compute Strickler values if necessary
        ori_values = {}
        if args.in_strickler_zones is not None:
            if not args.in_strickler_zones.endswith('.shp'):
                logger.critical('File "%s" is not a shp file.' %
                                args.in_strickler_zones)
                sys.exit(3)

            attributes = Shapefile.get_numeric_attribute_names(
                args.in_strickler_zones)
            try:
                index_attr = [attr for _, attr in attributes
                              ].index(args.in_strickler_attr)
            except ValueError:
                logger.critical('Attribute "%s" is not found.' %
                                args.in_strickler_attr)
                sys.exit(1)

            strickler_zones = []
            try:
                for zone in Shapefile.get_polygons(args.in_strickler_zones):
                    strickler_zones.append(zone)
            except ShapefileException as e:
                logger.error(e)
                sys.exit(3)

            if not strickler_zones:
                logger.error('The file does not contain any friction zone.')
                sys.exit(1)

            logger.debug('Recomputing friction coefficient values from zones')
            friction_coeff = np.full(
                resin.header.nb_nodes_2d,
                0.0)  # default value for nodes not included in any zone
            for i, (x, y) in enumerate(
                    tqdm(zip(resin.header.x, resin.header.y),
                         total=resin.header.nb_nodes_2d, unit='node')):
                point = Point(x, y)
                for zone in strickler_zones:
                    if zone.contains(point):
                        friction_coeff[i] = zone.attributes()[index_attr]
                        break
            in_varIDs.append('W')
            ori_values['W'] = friction_coeff
        else:
            if 'W' not in resin.header.var_IDs:
                logger.critical('The variable W is missing.')
                sys.exit(1)

        us_equation = None
        if args.friction_law:
            us_equation = get_US_equation(args.friction_law)

        resin.get_time()
        necessary_equations = get_necessary_equations(in_varIDs,
                                                      out_varIDs,
                                                      is_2d=True,
                                                      us_equation=us_equation)

        calculator = VolumeCalculator(VolumeCalculator.NET, 'TAU', None, resin,
                                      names, polygons, 1)
        calculator.construct_triangles(tqdm)
        calculator.construct_weights(tqdm)

        output_header = resin.header.copy()
        output_header.empty_variables()
        for var_ID in out_varIDs:
            output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, args.force) as resout:
            resout.write_header(output_header)

            mode = 'w' if args.force else 'x'
            with open(args.out_csv, mode, newline='') as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=args.sep)
                csvwriter.writerow(['time'] + names)

                for time_index, time in enumerate(tqdm(resin.time)):
                    values = do_calculations_in_frame(
                        necessary_equations,
                        resin,
                        time_index,
                        out_varIDs,
                        resin.header.np_float_type,
                        is_2d=True,
                        us_equation=STRICKLER_EQUATION,
                        ori_values=ori_values)
                    resout.write_entire_frame(output_header, time, values)

                    row = [time]
                    for j in range(len(calculator.polygons)):
                        weight = calculator.weights[j]
                        volume = calculator.volume_in_frame_in_polygon(
                            weight, values[pos_TAU], calculator.polygons[j])
                        row.append(volume)
                    csvwriter.writerow(row)
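Like the previous example, slf_bottom_friction is a CLI entry point; a hedged sketch of a direct call, with placeholder file names and the attributes used in the function above:

from argparse import Namespace

# Placeholder values; attribute names mirror the args fields used by slf_bottom_friction.
args = Namespace(
    in_slf='res2d.slf', lang='en',
    in_polygons='control_volumes.shp',        # polygons over which TAU is integrated
    in_strickler_zones='friction_zones.shp',  # zones carrying a friction coefficient
    in_strickler_attr='strickler',            # numeric attribute holding that coefficient
    friction_law=None,                        # or a law identifier accepted by get_US_equation
    out_slf='res2d_friction.slf', out_csv='tau_volumes.csv', sep=';',
    force=True,
)
slf_bottom_friction(args)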
Example #23
def mesh_crue10_run(args):
    set_logger_level(args.verbose)
    t1 = perf_counter()

    # Read the model and its submodels from xml/shp files
    etude = Etude(args.infile_etu)
    modele = etude.get_modele(args.model_name)
    modele.read_all()
    logger.info(modele)
    for sous_modele in modele.liste_sous_modeles:
        sous_modele.remove_sectioninterpolee()
        sous_modele.normalize_geometry()
        logger.info(sous_modele.summary())
        # sous_modele.write_shp_limites_lits_numerotes('limites_lits.shp')  # DEBUG
    logger.info(modele)

    global_mesh_constr = MeshConstructor()

    # Handle branches in minor bed
    for i, branche in enumerate(modele.get_liste_branches()):
        # Ignore branch if branch_patterns is set and do not match with current branch name
        if args.branch_patterns is not None:
            ignore = True
            for pattern in args.branch_patterns:
                if pattern in branche.id:
                    ignore = False
                    break
        else:
            ignore = False

        if branche.type not in args.branch_types_filter or not branche.is_active:
            ignore = True

        if not ignore:
            logger.info("===== TRAITEMENT DE LA BRANCHE %s =====" % branche.id)
            axe = branche.geom
            try:
                section_seq = CrossSectionSequence()
                for crue_section in branche.liste_sections_dans_branche:
                    if isinstance(crue_section, SectionProfil):
                        coords = list(crue_section.get_coord(add_z=True))
                        section = CrossSection(crue_section.id,
                                               [(coord[0], coord[1])
                                                for coord in coords],
                                               'Section')

                        # Determine some variables (constant over the simulation) from the geometry
                        z = np.array([coord[2] for coord in coords])
                        is_bed_active = crue_section.get_is_bed_active_array()
                        mean_strickler = crue_section.get_friction_coeff_array()
                        section.coord.values = np.core.records.fromarrays(
                            np.column_stack(
                                (z, is_bed_active, mean_strickler)).T,
                            names=VARIABLES_FROM_GEOMETRY)

                        section_seq.add_section(section)

                section_seq.compute_dist_proj_axe(axe, args.dist_max)
                if len(section_seq) >= 2:
                    section_seq.check_intersections()
                    # section_seq.sort_by_dist() is useless because profiles are already sorted
                    constraint_lines = ConstraintLine.get_lines_and_set_limits_from_sections(
                        section_seq, args.interp_constraint_lines)

                    mesh_constr = MeshConstructor(
                        section_seq=section_seq,
                        lat_step=args.lat_step,
                        nb_pts_lat=args.nb_pts_lat,
                        interp_values=args.interp_values)
                    mesh_constr.build_interp(constraint_lines, args.long_step,
                                             args.constant_long_disc)
                    mesh_constr.build_mesh(in_floworiented_crs=True)

                    global_mesh_constr.append_mesh_constr(mesh_constr)
                else:
                    logger.warning("Branche ignorée par manque de sections")
            except TatooineException as e:
                logger.error(
                    "/!\\ Branche ignorée à cause d'une erreur bloquante :")
                logger.error(e.message)
            logger.info("\n")

    # Handle casiers in floodplain
    nb_casiers = len(modele.get_liste_casiers())
    if args.infile_dem and nb_casiers > 0:
        logger.info("===== TRAITEMENT DES CASIERS =====")

        if not os.path.exists(args.infile_dem):
            raise TatooineException("File not found: %s" % args.infile_dem)
        from gdal import Open
        raster = Open(args.infile_dem)
        dem_interp = interp_raster(raster)

        floodplain_step = (args.floodplain_step
                           if args.floodplain_step is not None
                           else args.long_step)
        max_elem_area = floodplain_step * floodplain_step / 2.0
        simplify_dist = floodplain_step / 2.0

        for i, casier in enumerate(modele.get_liste_casiers()):
            if casier.is_active:
                if casier.geom is None:
                    raise TatooineException(
                        "Geometry of %s could not be found" % casier)
                line = casier.geom.simplify(simplify_dist)
                if not line.is_closed:
                    raise RuntimeError("Geometry of %s is not a closed line" % casier)
                coords = resample_2d_line(
                    line.coords,
                    floodplain_step)[1:]  # Ignore last duplicated node

                hard_nodes_xy = np.array(coords, dtype=float)
                hard_nodes_idx = np.arange(0, len(hard_nodes_xy), dtype=int)
                hard_segments = np.column_stack(
                    (hard_nodes_idx, np.roll(hard_nodes_idx, 1)))

                tri = {
                    'vertices': np.column_stack((hard_nodes_xy[:, 0], hard_nodes_xy[:, 1])),
                    'segments': hard_segments,
                }
                triangulation = triangle.triangulate(tri, opts='qpa%f' % max_elem_area)

                nodes_xy = np.array(triangulation['vertices'], dtype=float)
                bottom = dem_interp(nodes_xy)
                points = unstructured_to_structured(
                    np.column_stack((nodes_xy, bottom)), names=['X', 'Y', 'Z'])

                global_mesh_constr.add_floodplain_mesh(triangulation, points)

    if len(global_mesh_constr.points) == 0:
        raise ExceptionCrue10(
            "Aucun point à traiter, adaptez l'option `--branch_patterns` et/ou `--branch_types_filter`"
        )

    logger.info(global_mesh_constr.summary())  # General information about the merged mesh

    if args.infile_rcal:
        # Read rcal result file
        results = RunResults(args.infile_rcal)
        logger.info(results.summary())

        # Check result consistency
        missing_sections = modele.get_missing_active_sections(
            results.emh['Section'])
        if missing_sections:
            raise ExceptionCrue10("Sections manquantes :\n%s" %
                                  missing_sections)

        # Subset results to get requested variables at active sections
        varnames_1d = results.variables['Section']
        logger.info("Variables 1D disponibles aux sections: %s" % varnames_1d)
        try:
            pos_z = varnames_1d.index('Z')
        except ValueError:
            raise TatooineException(
                "La variable Z doit être présente dans les résultats aux sections"
            )
        if global_mesh_constr.has_floodplain:
            try:
                pos_z_fp = results.variables['Casier'].index('Z')
            except ValueError:
                raise TatooineException(
                    "La variable Z doit être présente dans les résultats aux casiers"
                )
        else:
            pos_z_fp = None

        pos_variables = [
            results.variables['Section'].index(var) for var in varnames_1d
        ]
        pos_sections_list = [
            results.emh['Section'].index(profil.id)
            for profil in global_mesh_constr.section_seq
        ]
        if global_mesh_constr.has_floodplain:
            pos_casiers_list = [
                results.emh['Casier'].index(casier.id)
                for casier in modele.get_liste_casiers() if casier.is_active
            ]
        else:
            pos_casiers_list = []

        additional_variables_id = ['H']
        if 'Vact' in varnames_1d:
            additional_variables_id.append('M')

        values_geom = global_mesh_constr.interp_values_from_geom()
        z_bottom = values_geom[0, :]
        with Serafin.Write(args.outfile_mesh, args.lang,
                           overwrite=True) as resout:
            title = '%s (written by TatooineMesher)' % os.path.basename(
                args.outfile_mesh)
            output_header = Serafin.SerafinHeader(title=title, lang=args.lang)
            output_header.from_triangulation(
                global_mesh_constr.triangle['vertices'],
                global_mesh_constr.triangle['triangles'] + 1)
            for var_name in VARIABLES_FROM_GEOMETRY:
                if var_name in ['B', 'W']:
                    output_header.add_variable_from_ID(var_name)
                else:
                    output_header.add_variable_str(var_name, var_name, '')
            for var_id in additional_variables_id:
                output_header.add_variable_from_ID(var_id)
            for var_name in varnames_1d:
                output_header.add_variable_str(var_name, var_name, '')
            resout.write_header(output_header)

            if args.calc_unsteady is None:
                for i, calc_name in enumerate(results.calc_steady_dict.keys()):
                    logger.info("~> Calcul permanent %s" % calc_name)
                    # Read a single *steady* calculation
                    res_steady = results.get_res_steady(calc_name)
                    variables_at_profiles = res_steady['Section'][
                        pos_sections_list, :][:, pos_variables]
                    if global_mesh_constr.has_floodplain:
                        z_at_casiers = res_steady['Casier'][pos_casiers_list,
                                                            pos_z_fp]
                    else:
                        z_at_casiers = None

                    # Interpolate between sections and set in casiers
                    values_res = global_mesh_constr.interp_values_from_res(
                        variables_at_profiles, z_at_casiers, pos_z)

                    # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                    depth = np.clip(values_res[pos_z, :] - z_bottom,
                                    a_min=0.0,
                                    a_max=None)

                    # Merge and write values
                    if 'Vact' in varnames_1d:
                        # Compute velocity magnitude from Vact and apply mask "is active bed"
                        velocity = values_res[
                            varnames_1d.index('Vact'), :] * values_geom[1, :]
                        values = np.vstack(
                            (values_geom, depth, velocity, values_res))
                    else:
                        values = np.vstack((values_geom, depth, values_res))

                    resout.write_entire_frame(output_header, 3600.0 * i,
                                              values)

            else:
                calc_unsteady = results.get_calc_unsteady(args.calc_unsteady)
                logger.info("Calcul transitoire %s" % args.calc_unsteady)
                res_unsteady = results.get_res_unsteady(args.calc_unsteady)

                for i, (time, _) in enumerate(calc_unsteady.frame_list):
                    logger.info("~> %fs" % time)
                    res_at_sections = res_unsteady['Section'][i, :, :]
                    variables_at_profiles = res_at_sections[
                        pos_sections_list, :][:, pos_variables]
                    if global_mesh_constr.has_floodplain:
                        z_at_casiers = res_unsteady['Casier'][i,
                                                              pos_casiers_list,
                                                              pos_z_fp]
                    else:
                        z_at_casiers = None

                    # Interpolate between sections
                    values_res = global_mesh_constr.interp_values_from_res(
                        variables_at_profiles, z_at_casiers, pos_z)

                    # Compute water depth: H = Z - Zf and clip below 0m (avoid negative values)
                    depth = np.clip(values_res[pos_z, :] - z_bottom,
                                    a_min=0.0,
                                    a_max=None)

                    # Merge and write values
                    if 'Vact' in varnames_1d:
                        # Compute velocity magnitude from Vact and apply mask "is active bed"
                        velocity = values_res[
                            varnames_1d.index('Vact'), :] * values_geom[1, :]
                        values = np.vstack(
                            (values_geom, depth, velocity, values_res))
                    else:
                        values = np.vstack((values_geom, depth, values_res))

                    resout.write_entire_frame(output_header, time, values)

    else:
        # Write a single frame with only variables from geometry
        global_mesh_constr.export_mesh(args.outfile_mesh, lang=args.lang)

    t2 = perf_counter()
    logger.info("=> Execution time: {}s".format(t2 - t1))
Example #24
    def btnSubmitEvent(self):
        if self.secondTable.rowCount() == 0:
            QMessageBox.critical(
                self, 'Error',
                'Choose at least one output variable before submitting!',
                QMessageBox.Ok)
            return

        canceled, filename = save_dialog('Serafin',
                                         input_names=[
                                             self.input.first_data.filename,
                                             self.input.second_data.filename
                                         ])
        if canceled:
            return

        # deduce header from selected variable IDs
        output_header = self._getOutputHeader()
        time_indices = self.input.common_frames
        operation_type = {
            0: operations.PROJECT,
            1: operations.DIFF,
            2: operations.REV_DIFF,
            3: operations.MAX_BETWEEN,
            4: operations.MIN_BETWEEN
        }[self.operationBox.currentIndex()]
        self.parent.inDialog()
        progressBar = OutputProgressDialog()

        # do some calculations
        try:
            with Serafin.Read(self.input.first_data.filename,
                              self.input.first_data.language) as first_in:
                first_in.header = self.input.first_data.header
                first_in.time = self.input.first_data.time

                with Serafin.Read(
                        self.input.second_data.filename,
                        self.input.second_data.language) as second_in:
                    second_in.header = self.input.second_data.header
                    second_in.time = self.input.second_data.time

                    progressBar.setValue(5)
                    QApplication.processEvents()

                    with Serafin.Write(
                            filename,
                            self.input.first_data.language) as out_stream:

                        out_stream.write_header(output_header)
                        process = ProjectMeshThread(
                            first_in, second_in, out_stream, output_header,
                            self.input.is_inside,
                            self.input.point_interpolators, time_indices,
                            operation_type)
                        progressBar.connectToThread(process)
                        process.run()

                        if not process.canceled:
                            progressBar.outputFinished()
        except (Serafin.SerafinRequestError,
                Serafin.SerafinValidationError) as e:
            QMessageBox.critical(None, 'Serafin Error', e.message,
                                 QMessageBox.Ok, QMessageBox.Ok)
            return

        progressBar.exec_()
        self.parent.outDialog()
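The submit handler above resolves the operation from the combo-box index with a plain dict lookup. A self-contained sketch of that mapping (the integer constants below are stand-ins for the real operations.* values):

# Stand-ins for operations.PROJECT, operations.DIFF, etc.
PROJECT, DIFF, REV_DIFF, MAX_BETWEEN, MIN_BETWEEN = range(5)

OPERATION_BY_INDEX = {
    0: PROJECT,
    1: DIFF,
    2: REV_DIFF,
    3: MAX_BETWEEN,
    4: MIN_BETWEEN,
}

current_index = 1                            # e.g. operationBox.currentIndex()
operation_type = OPERATION_BY_INDEX[current_index]
assert operation_type == DIFF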