예제 #1
0
    def names_hab_change(self):
        """
        Adjust the reach combobox according to the .hab filename selected by the user.

        The reach list is cleared in all cases; when a file is selected, its
        reaches are reloaded from the hdf5 attributes, and a warning is
        emitted if the file uses date units (which cannot be interpolated).
        """
        hdf5name = self.hab_filenames_qcombobox.currentText()
        # always start from a clean reach list
        self.hab_reach_qcombobox.clear()
        if not hdf5name:
            # no file selected
            self.require_data_group.hide()
        else:
            # create hdf5 class to get hdf5 informations
            hdf5 = Hdf5Management(self.path_prj, hdf5name, new=False, edit=False)
            if hdf5.file_object:
                hdf5.get_hdf5_attributes(close_file=True)
                # prepend an empty entry when several reaches exist so the
                # user has to make an explicit choice
                if len(hdf5.data_2d.reach_list) == 1:
                    reach_names = hdf5.data_2d.reach_list
                else:
                    reach_names = [""] + hdf5.data_2d.reach_list

                unit_type = hdf5.data_2d.unit_type
                if "Date" not in unit_type:
                    self.hab_reach_qcombobox.addItems(reach_names)
                else:
                    # only warn when the change comes from direct user action
                    if self.sender().hasFocus():
                        self.send_log.emit(self.tr("Warning: This file contain date unit. "
                                                   "To be interpolated, file must contain discharge or timestep unit."))
예제 #2
0
    def unit_hdf5_change(self):
        """
        Update the hydrosignature summary table from the selected units.

        Loads the hydrosignature data of the selected file, computes the
        summary for the selected reaches/units and displays it in the result
        table; when nothing is selected, shows an empty table and disables
        the plot buttons.
        """
        selection_unit = self.units_QListWidget.selectedItems()
        if selection_unit:
            hdf5name = self.file_selection_listwidget.selectedItems()[0].text()

            # create hdf5 class and load hydrosignature data
            hdf5 = Hdf5Management(self.path_prj, hdf5name, new=False, edit=False)
            hdf5.load_hydrosignature()
            hdf5.close_file()

            reach_indexes = [element.row() for element in self.reach_QListWidget.selectedIndexes()]
            # the special "all units" entry expands to every unit of the file
            if len(selection_unit) == 1 and selection_unit[0].text() == "all units":
                unit_indexes = list(range(hdf5.nb_unit))
            else:
                unit_indexes = [element.row() for element in self.units_QListWidget.selectedIndexes()]
            # get hs data
            hdf5.data_2d.get_hs_summary_data(reach_indexes, unit_indexes)

            # table
            mytablemodel = MyTableModel(hdf5.data_2d.hs_summary_data)
            self.result_tableview.setModel(mytablemodel)  # set model
            self.result_plot_button_area.setEnabled(True)
            self.result_plot_button_volume.setEnabled(True)
        else:
            # no unit selected: empty table and disabled plot buttons
            mytablemodel = MyTableModel(["", ""])
            self.result_tableview.setModel(mytablemodel)  # set model
            self.result_plot_button_area.setEnabled(False)
            self.result_plot_button_volume.setEnabled(False)
예제 #3
0
 def names_hdf5_change_2(self):
     """Refresh the reach and unit lists after a new hdf5 file selection."""
     self.reach_QListWidget_2.clear()
     self.units_QListWidget_2.clear()
     selected = self.file_selection_listwidget_2.selectedItems()
     if not selected:
         return
     # read the reach names from the selected file's attributes
     hdf5 = Hdf5Management(self.path_prj, selected[0].text(), new=False, edit=False)
     hdf5.get_hdf5_attributes(close_file=True)
     # check reach
     self.reach_QListWidget_2.addItems(hdf5.data_2d.reach_list)
예제 #4
0
    def create_model_array_and_display(self, chronicle, types, source):
        """
        Compute the interpolation for the selected fish and display the result.

        :param chronicle: desired units/dates to interpolate to
        :param types: description of the chronicle column types
        :param source: source name forwarded to the table model
        """
        hvum = HydraulicVariableUnitManagement()
        # get fish selected
        for selection in self.fish_available_qlistwidget.selectedItems():
            hvum.user_target_list.append(selection.data(Qt.UserRole))

        # get filename
        hdf5name = self.hab_filenames_qcombobox.currentText()

        # load hdf5 data
        hdf5 = Hdf5Management(self.path_prj, hdf5name, new=False, edit=False)
        hdf5.get_hdf5_attributes(close_file=True)

        # get reach_name
        reach_index = hdf5.data_2d.reach_list.index(self.hab_reach_qcombobox.currentText())

        # check matching units for interpolation
        valid, text = check_matching_units(hdf5.data_2d.unit_type, types)

        if not valid:
            self.send_log.emit("Warning: " + self.tr("Interpolation not done.") + text)
            # disable pushbutton
            self.plot_chronicle_qpushbutton.setEnabled(False)
            self.export_txt_chronicle_qpushbutton.setEnabled(False)
        else:
            data_to_table, horiz_headers, vertical_headers = compute_interpolation(hdf5.data_2d,
                                                                                   hvum.user_target_list,
                                                                                   reach_index,
                                                                                   chronicle,
                                                                                   types,
                                                                                   rounddata=True)
            # plain-text discharge unit for the table headers
            for horiz_header_num, horiz_header in enumerate(horiz_headers):
                horiz_headers[horiz_header_num] = horiz_header.replace("m<sup>3</sup>/s", "m3/s")

            self.mytablemodel = MyTableModelHab(data_to_table, horiz_headers, vertical_headers, source=source)
            self.require_unit_qtableview.model().clear()
            self.require_unit_qtableview.setModel(self.mytablemodel)  # set model
            # adjust width
            header = self.require_unit_qtableview.horizontalHeader()
            for i in range(len(horiz_headers)):
                header.setSectionResizeMode(i, QHeaderView.ResizeToContents)
            self.require_unit_qtableview.verticalHeader().setDefaultSectionSize(
                self.require_unit_qtableview.verticalHeader().minimumSectionSize())
            # enable pushbuttons (was done twice, once under a wrong "disable" comment)
            self.plot_chronicle_qpushbutton.setEnabled(True)
            self.export_txt_chronicle_qpushbutton.setEnabled(True)
            self.send_log.emit(self.tr("Interpolation done. Interpolated values can now be view in graphic and export in text file."))
            self.interpolated_results_group.show()
            self.interpolated_results_group.setChecked(True)
예제 #5
0
 def reach_hdf5_change_2(self):
     """Fill the unit list for the reach currently selected in the second tab."""
     selected_files = self.file_selection_listwidget_2.selectedItems()
     selected_reaches = self.reach_QListWidget_2.selectedItems()
     self.units_QListWidget_2.clear()
     # exactly one reach selected
     if len(selected_reaches) == 1:
         hdf5 = Hdf5Management(self.path_prj, selected_files[0].text(), new=False, edit=False)
         hdf5.get_hdf5_attributes(close_file=True)
         # add the units of that reach, right-aligned
         reach_row = self.reach_QListWidget_2.currentRow()
         for unit_text in hdf5.data_2d.unit_list[reach_row]:
             unit_item = QListWidgetItem(unit_text)
             unit_item.setTextAlignment(Qt.AlignRight)
             self.units_QListWidget_2.addItem(unit_item)
예제 #6
0
    def update_gui(self):
        """
        Refresh the file selection list from the project's hdf5 folder.

        Previously selected filenames stay selected when still present;
        corrupted files are reported in the log; the file is preselected
        when it is the only one available.
        """
        selected_file_names = [
            selection_el.text()
            for selection_el in self.file_selection_listwidget.selectedItems()
        ]
        # computing_group
        hdf5_path = os.path.join(self.path_prj, "hdf5")
        hyd_names = get_filename_by_type_physic("hydraulic", hdf5_path)
        hab_names = get_filename_by_type_physic("habitat", hdf5_path)
        names = hyd_names + hab_names
        self.file_selection_listwidget.blockSignals(True)
        self.file_selection_listwidget.clear()
        for name in names:
            # check the file can be opened before listing it
            try:
                hdf5 = Hdf5Management(self.path_prj,
                                      name,
                                      new=False,
                                      edit=False)
                hdf5.get_hdf5_attributes(close_file=True)
                item_name = QListWidgetItem()
                item_name.setText(name)
                self.file_selection_listwidget.addItem(item_name)
                if name in selected_file_names:
                    item_name.setSelected(True)
                # TODO: sort files (hdf5 attributes available for HRR) .hyd,
                # one whole profile for all units, ...
            except Exception:
                self.send_log.emit(
                    self.tr("Error: " + name +
                            " file seems to be corrupted. Delete it with HABBY or manually."))

        self.file_selection_listwidget.blockSignals(False)
        # preselection if one
        if self.file_selection_listwidget.count() == 1:
            self.file_selection_listwidget.selectAll()
예제 #7
0
    def reach_hab_change(self):
        """
        Update the fish list and unit bounds when the selected reach changes.

        Reads unit type and unit values of the selected reach from the .hab
        file and fills the min/max labels; the from/to sequence fields are
        only populated when more than one unit exists.
        """
        hdf5name = self.hab_filenames_qcombobox.currentText()
        reach_name = self.hab_reach_qcombobox.currentText()
        self.unit_qlabel.setText("[]")
        if not reach_name:
            # no reach selected: clean and disable the group widgets
            self.disable_and_clean_group_widgets(True)
        else:
            # clean
            self.disable_and_clean_group_widgets(False)
            # read attributes from file
            hdf5 = Hdf5Management(self.path_prj, hdf5name, new=False, edit=False)
            hdf5.get_hdf5_attributes(close_file=True)
            unit_type = hdf5.data_2d.unit_type
            unit_type = unit_type.replace("m3/s", "m<sup>3</sup>/s")
            # extract the "[...]" unit part, brackets included
            unit_type_value = unit_type[unit_type.index("["):unit_type.index("]") + 1]
            reach_index = hdf5.data_2d.reach_list.index(reach_name)
            units_name = hdf5.data_2d.unit_list[reach_index]

            # hab
            if hdf5.data_2d.hvum.hdf5_and_computable_list.habs().meshs().names_gui():
                for mesh in hdf5.data_2d.hvum.hdf5_and_computable_list.habs().meshs():
                    mesh_item = QListWidgetItem(mesh.name_gui, self.fish_available_qlistwidget)
                    mesh_item.setData(Qt.UserRole, mesh)
                    self.fish_available_qlistwidget.addItem(mesh_item)
                self.fish_available_qlistwidget.selectAll()
            # set min and max unit for from to by
            unit_number_list = list(map(float, units_name))
            min_unit = min(unit_number_list)
            max_unit = max(unit_number_list)
            self.unit_min_qlabel.setText(str(min_unit))
            self.unit_max_qlabel.setText(str(max_unit))
            self.unit_type_qlabel.setText(unit_type)
            # sequence fields only make sense with more than one unit
            if len(unit_number_list) > 1:
                self.from_qlineedit.setText(str(min_unit))
                self.to_qlineedit.setText(str(max_unit))
                self.unit_qlabel.setText(unit_type_value)
예제 #8
0
    def export_empty_text_file(self):
        """
        Export an empty interpolation text template for the selected .hab file.

        The exported file (in the 'output/text' project folder) is meant to
        be filled by the user with desired values and re-imported in HABBY.
        """
        hdf5name = self.hab_filenames_qcombobox.currentText()
        if hdf5name:
            # create hdf5 class and read its attributes
            hdf5 = Hdf5Management(self.path_prj, hdf5name, new=False, edit=False)
            hdf5.get_hdf5_attributes(close_file=True)
            unit_type = hdf5.data_2d.unit_type
            units_name = hdf5.data_2d.unit_list[self.hab_reach_qcombobox.currentIndex()]
            unit_number = list(map(float, units_name))
            min_unit = min(unit_number)
            max_unit = max(unit_number)

            # export
            exported = export_empty_text_from_hdf5(unit_type, min_unit, max_unit, hdf5name, self.path_prj)
            if exported:
                self.send_log.emit(self.tr("Empty text has been exported in 'output/text' project folder. Open and fill it "
                                   "with the desired values and then import it in HABBY."))
            else:
                self.send_log.emit('Error: ' + self.tr('The file has not been exported as it may be opened by another program.'))
예제 #9
0
    def update_gui(self):
        """
        Refresh the file list and the hydrosignature 'computed' checkboxes.

        Each file gets a checkable item indicating whether its hydrosignature
        has already been computed; corrupted files are reported in the log.
        """
        selected_file_names = [selection_el.text() for selection_el in self.file_selection_listwidget.selectedItems()]
        # computing_group
        hyd_names = get_filename_by_type_physic("hydraulic", os.path.join(self.path_prj, "hdf5"))
        hab_names = get_filename_by_type_physic("habitat", os.path.join(self.path_prj, "hdf5"))
        names = hyd_names + hab_names
        self.file_selection_listwidget.blockSignals(True)
        self.file_selection_listwidget.clear()
        self.hs_computed_listwidget.blockSignals(True)
        self.hs_computed_listwidget.clear()
        for name in names:
            # filename
            item_name = QListWidgetItem()
            item_name.setText(name)
            self.file_selection_listwidget.addItem(item_name)
            if name in selected_file_names:
                item_name.setSelected(True)
            # check state: is the hydrosignature already computed?
            item = QListWidgetItem()
            item.setText("")
            item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
            try:
                hdf5 = Hdf5Management(self.path_prj, name, new=False, edit=False)
                hdf5.get_hdf5_attributes(close_file=True)
                if hdf5.hs_calculated:
                    item.setCheckState(Qt.Checked)
                else:
                    item.setCheckState(Qt.Unchecked)
            except Exception:
                self.send_log.emit(self.tr("Error: " + name + " file seems to be corrupted. Delete it with HABBY or manually."))
            self.hs_computed_listwidget.addItem(item)

            item.setTextAlignment(Qt.AlignCenter)

        self.file_selection_listwidget.blockSignals(False)
        self.hs_computed_listwidget.blockSignals(False)
        # preselection if one
        if self.file_selection_listwidget.count() == 1:
            self.file_selection_listwidget.selectAll()
예제 #10
0
    def names_hdf5_change(self):
        """Refresh reach list and hydrosignature input classes for the selected file."""
        self.reach_QListWidget.clear()
        self.units_QListWidget.clear()
        selected_items = self.file_selection_listwidget.selectedItems()
        if not selected_items:
            self.input_result_group.hide()
            return
        # read attributes of the selected file
        filename = selected_items[0].text()
        hdf5 = Hdf5Management(self.path_prj, filename, new=False, edit=False)
        hdf5.get_hdf5_attributes(close_file=True)
        # check reach
        self.reach_QListWidget.addItems(hdf5.data_2d.reach_list)

        # hydrosignature input classes (height, then velocity)
        h_classes = hdf5.hs_input_class[0]
        v_classes = hdf5.hs_input_class[1]
        self.input_class_h_lineedit.setText(", ".join(str(value) for value in h_classes))
        self.input_class_v_lineedit.setText(", ".join(str(value) for value in v_classes))

        self.input_class_plot_button.setEnabled(True)

        self.toggle_group(False)
        self.input_result_group.show()
        self.toggle_group(True)
예제 #11
0
File: hrr.py  Project: YannIrstea/habby
# NOTE(review): `q=[]` and `project_properties={}` are mutable default arguments
# (shared across calls); they appear to be only read here, but `None` sentinels
# would be safer — TODO confirm and fix.
def hrr(hrr_description, progress_value, q=[], print_cmd=False, project_properties={}):
    """
    Compute Hydraulic Ramping Rate (HRR) meshes between successive units.

    For each pair of consecutive units (q1 > q2, iterated from last unit to
    first) of the input .hyd file, builds a new 2d mesh of the dewatered
    meshes, attaches "delta_level" (water-surface drop) and "hrr"
    (delta_level / max_slope_bottom / time) mesh variables, then exports a
    new hdf5 file named "<input>_HRR<ext>". Work in progress: several
    branches are placeholders or commented out.

    :param hrr_description: dict with at least "hdf5_name" (input filename)
    :param progress_value: shared value updated from 10 to 100 for the GUI
    :param q: queue receiving the captured stdout when print_cmd is False
    :param print_cmd: if True, leave stdout untouched
    :param project_properties: project dict with at least "path_prj"
    """
    if not print_cmd:
        sys.stdout = mystdout = StringIO()
    # progress
    progress_value.value = 10

    # deltatlist = hrr_description["deltatlist"]
    deltatlist = [0,3.6*3600,2.5*3600,1.8*3600]  # TODO: change it (hard-coded time steps; values look like seconds — confirm)
    input_filename_1 = hrr_description["hdf5_name"]
    path_prj = project_properties["path_prj"]

    # load file
    hdf5_1 = Hdf5Management(path_prj, input_filename_1, new=False, edit=False)
    # TODO: check only one whole profile
    # TODO: add datamesh in the finite-volume case
    hdf5_1.load_hdf5(whole_profil=True)
    # N-1 output units: one per pair of consecutive input units
    unit_list = [["temp"] * (hdf5_1.data_2d[0].unit_number - 1)]  # TODO: multi reach not done
    new_data_2d = Data2d(reach_number=hdf5_1.data_2d.reach_number,
                         unit_list=unit_list)  # new
    # get attr
    new_data_2d.__dict__.update(hdf5_1.data_2d.__dict__)  # copy all attr
    # loop
    for reach_number in range(hdf5_1.data_2d.reach_number):

        # progress
        delta_reach = 90 / new_data_2d.reach_number

        xy_whole_profile = hdf5_1.data_2d_whole[reach_number][0]["node"]["xy"]
        z_whole_profile = hdf5_1.data_2d_whole[reach_number][0]["node"]["z"]
        tin_whole_profile = hdf5_1.data_2d_whole[reach_number][0]["mesh"]["tin"]
        locawp, countcontactwp = connectivity_mesh_table(tin_whole_profile)
        countcontactwp = countcontactwp.flatten()

        #data adjustment for whole profile
        hdf5_1.data_2d_whole[reach_number][0]["mesh"]["data"] = pd.DataFrame()
        hdf5_1.data_2d_whole[reach_number][0]["node"]["data"]=pd.DataFrame(hdf5_1.data_2d_whole[reach_number][0]["node"]["z"],columns=["z"])
        #calculation of max_slope_bottom for the whole profile
        hdf5_1.data_2d_whole[reach_number][0].c_mesh_max_slope_bottom()
        max_slope_bottom_whole_profile = hdf5_1.data_2d_whole[reach_number][0]["mesh"]["data"][
            hdf5_1.data_2d_whole.hvum.max_slope_bottom.name].to_numpy()
        unit_counter_3 = -1
        # iterate pairs of consecutive units from last to first
        for unit_number in range(len(hdf5_1.data_2d[0])-1,0,-1): # TODO: transient

            # progress
            delta_unit = delta_reach / len(range(len(hdf5_1.data_2d[0])-1,0,-1))

            unit_counter_3 += 1
            # TODO: retrieve time from deltatlist (per-unit time interval)
            deltat=deltatlist[unit_number]

            q1=hdf5_1.data_2d[reach_number][unit_number].unit_name #q1>q2
            q2 = hdf5_1.data_2d[reach_number][unit_number-1].unit_name  # q2<q1
            #Todo check that the discharge are increasing time step hydropeaking the flow is increasing or decreasing  TXT file must indicate time interval and the way the information is sorted
            tin1 = hdf5_1.data_2d[reach_number][unit_number]["mesh"]["tin"]
            tin2 = hdf5_1.data_2d[reach_number][unit_number-1]["mesh"]["tin"]
            datamesh1 = hdf5_1.data_2d[reach_number][unit_number]["mesh"]["data"]  # TODO: pandas_array.iloc
            # locawp, countcontactwp = connectivity_mesh_table(tin1)
            # loca1, countcontact1=connectivity_mesh_table(tin1)
            # loca2, countcontact2 = connectivity_mesh_table(tin2)
            # countcontact1 = countcontact1.flatten()
            # countcontact2 = countcontact2.flatten()

            # mean water depth and water-surface elevation per mesh, for both units
            hdf5_1.data_2d[reach_number][unit_number].c_mesh_mean_from_node_values('h')
            hmoy1=hdf5_1.data_2d[reach_number][unit_number]["mesh"]["data"]['h'].to_numpy()
            hdf5_1.data_2d[reach_number][unit_number].c_mesh_mean_from_node_values('z')
            zsurf1 =hmoy1+ hdf5_1.data_2d[reach_number][unit_number]["mesh"]["data"]['z'].to_numpy()
            hdf5_1.data_2d[reach_number][unit_number-1].c_mesh_mean_from_node_values('h')
            hmoy2 = hdf5_1.data_2d[reach_number][unit_number-1]["mesh"]["data"]['h'].to_numpy()
            hdf5_1.data_2d[reach_number][unit_number-1].c_mesh_mean_from_node_values('z')
            zsurf2 =hmoy2+ hdf5_1.data_2d[reach_number][unit_number-1]["mesh"]["data"]['z'].to_numpy()
            i_split1 = hdf5_1.data_2d[reach_number][unit_number]["mesh"]["data"]["i_split"]
            i_split2 = hdf5_1.data_2d[reach_number][unit_number - 1]["mesh"]["data"]["i_split"]
            xy1 = hdf5_1.data_2d[reach_number][unit_number]["node"]["xy"]
            xy2 = hdf5_1.data_2d[reach_number][unit_number-1]["node"]["xy"]
            # TODO: pandas data can have several dtype
            # NOTE(review): datanode2 reads unit_number, not unit_number-1 —
            # looks like a copy-paste slip, but datanode2 is unused below; confirm.
            datanode1=hdf5_1.data_2d[reach_number][unit_number]["node"]["data"].to_numpy()
            datanode2 = hdf5_1.data_2d[reach_number][unit_number]["node"]["data"].to_numpy()


            # map both unit meshes back to the whole-profile meshes
            i_whole_profile1 = hdf5_1.data_2d[reach_number][unit_number]["mesh"]["i_whole_profile"]
            i_whole_profile2 = hdf5_1.data_2d[reach_number][unit_number-1]["mesh"]["i_whole_profile"]
            sortwp1, sortwp2, iwholedone, rwp1, rwp2=analyse_whole_profile(i_whole_profile1, i_whole_profile2)
            # accumulators for the new (unit-pair) mesh
            imeshpt3=0
            tin3 = []
            datamesh3=[]
            i_whole_profile3 = []
            i_split3 = []
            max_slope_bottom3=[]
            deltaz3=[]
            xy3=[]
            datanode3=[]

            def store_mesh_tin1(k,imeshpt3):
                """Append the k-th tin1 mesh of whole-profile mesh `iwp` to the
                new-mesh accumulators (closure over iwp, deltaz3_, etc.) and
                mark that whole-profile mesh as done."""
                i_whole_profile3.append(iwp)
                max_slope_bottom3.append(max_slope_bottom_whole_profile[iwp])
                deltaz3.append(deltaz3_)
                i_split3.append(
                    0)  # even in the case of isplit1=1 ie cut2D have left a triangle part of the mesh that was partially wetted

                for i3 in range(3):
                    xy3.append(xy1[tin1[sortwp1[rwp1[iwp][0] + k][1]][i3]])
                    datanode3.append(datanode1[tin1[sortwp1[rwp1[iwp][0] + k][1]][i3]])
                tin3.append([imeshpt3, imeshpt3 + 1, imeshpt3 + 2])
                datamesh3.append(datamesh1.iloc[sortwp1[rwp1[iwp][0] + k]])

                iwholedone[iwp] = 1

            # progress
            delta_mesh = delta_unit / len(iwholedone)

            for iwp in range(len(iwholedone)):

                # progress
                progress_value.value = progress_value.value + delta_mesh

                if iwholedone[iwp]==0:
                    if rwp1[iwp][1]==0: #  CASE 0  the tin1 mesh is dryed
                        if rwp2[iwp][1]==0:
                            iwholedone[iwp]=2
                        else:
                            iwholedone[iwp] = -1
                    elif rwp1[iwp][1]==1:
                        if rwp2[iwp][1]==0: # CASE 1a & 1b the tin1 mesh has been dryed
                            deltaz3_ = calculate_deltaz3(iwp, locawp, countcontactwp, sortwp1, sortwp2, rwp1, rwp2,
                                                         tin1, tin2, zsurf1, zsurf2)
                            store_mesh_tin1(0, imeshpt3)
                            imeshpt3 += 3

                        elif rwp2[iwp][1] ==1:
                            if i_split1[sortwp1[rwp1[iwp][0]][1]]==1 and i_split2[sortwp2[rwp2[iwp][0]][1]]==1:
                                # NOTE(review): placeholder branch (both meshes split) — not implemented yet




                                titi=3




                            elif i_split1[sortwp1[rwp1[iwp][0]][1]]==0 and i_split2[sortwp2[rwp2[iwp][0]][1]]==0: #CASE 1a
                                iwholedone[iwp] = 2
                            else:
                                iwholedone[iwp] = -1

                        # elif rwp2[iwp][1]>1: # the mesh has been partially dryed
                        #     deltaz3com=calculate_deltaz3(iwp, locawp, countcontactwp, sortwp1, sortwp2, rwp1, rwp2,
                        #                           tin1, tin2, zsurf1, zsurf2)
                        #     xyp=[]
                        #     datameshp=[]
                        #     for i3 in range(3):
                        #         xyp.append(xy1[tin1[sortwp1[ rwp1[iwp][0] ][1]][i3]])
                        #         datameshp.append(datanode1[tin1[sortwp1[rwp1[iwp][0]][1]][i3]])
                        #     xyp_=np.array(xyp)
                        #     datameshp_=np.array(datameshp)
                        #
                        #     for j in range(rwp2[iwp][1]):
                        #         tin3.append([imeshpt3, imeshpt3 + 1, imeshpt3 + 2])
                        #         datamesh3.append(datamesh1.iloc[sortwp1[rwp1[iwp][0] ]]) # ou quelque chose du genre
                        #         i_whole_profile3.append(iwp)
                        #         max_slope_bottom3.append(max_slope_bottom_whole_profile[iwp])
                        #         deltaz3.append(deltaz3com)
                        #         i_split3.append(1)
                        #         imeshpt3 += 3
                        #         xyp=np.array()
                        #         for i3 in range(3):
                        #             xy3_=xy2[tin2[sortwp2[rwp2[iwp][0]][1] + j][i3]]
                        #             xy3.append(xy3_)
                        #             datanode3_=finite_element_interpolation(xy3_,xyp_,datameshp_)
                        #             datanode3.append(datanode3_)
                        #     iwholedone[iwp] = 1
                    elif rwp1[iwp][1] == 2:
                        if rwp2[iwp][1] == 0:  # CASE 3a the tin1 2 meshes has been dryed
                            deltaz3_ =calculate_deltaz3(iwp, locawp, countcontactwp, sortwp1, sortwp2, rwp1, rwp2,
                                                             tin1, tin2, zsurf1, zsurf2)
                            for k in range(2):
                                store_mesh_tin1(k,imeshpt3)
                                imeshpt3 += 3


                    else: # unknown domain
                                iwholedone[iwp] = 2

            tin3 = np.array(tin3)
            i_whole_profile3 = np.array(i_whole_profile3)
            i_split3 = np.array(i_split3)
            max_slope_bottom3=np.array(max_slope_bottom3)
            deltaz3=np.array(deltaz3)
            # NOTE(review): deltatlist entries already look like seconds
            # (x*3600 above); multiplying deltat by 3600 again may be a
            # double unit conversion — TODO confirm intended units.
            hrr3=(deltaz3/max_slope_bottom3)/(deltat*3600)
            xy3=np.array(xy3)
            datanode3=np.array(datanode3)
            #TODO datamesh3

            #remove_duplicate_points
            xy3b, indices3, indices2 = np.unique(xy3, axis=0, return_index=True, return_inverse=True)
            if len(xy3b)<len(xy3):
                tin3= indices2[tin3]
                datanode3= datanode3[indices3]

            # store the new unit (named "<q1>-<q2>") into the output Data2d
            unit_list[reach_number][unit_counter_3] = q1+'-'+q2
            new_data_2d[reach_number][unit_counter_3].unit_name = q1+'-'+q2
            new_data_2d[reach_number][unit_counter_3]["mesh"]["tin"] = tin3
            new_data_2d[reach_number][unit_counter_3]["mesh"]["data"] = pd.DataFrame()  # TODO: datamesh3 (originally iwhole, isplit and maybe finite-volume things); the pandas mesh data must be rebuilt with pandas_array.iloc
            new_data_2d[reach_number][unit_counter_3]["mesh"]["i_whole_profile"] = i_whole_profile3
            new_data_2d[reach_number][unit_counter_3]["mesh"]["data"]["i_split"] = i_split3
            new_data_2d[reach_number][unit_counter_3]["mesh"]["data"]["max_slope_bottom"] = max_slope_bottom3
            new_data_2d[reach_number][unit_counter_3]["mesh"]["data"]["delta_level"] = deltaz3
            new_data_2d[reach_number][unit_counter_3]["mesh"]["data"]["hrr"] = hrr3
            new_data_2d[reach_number][unit_counter_3]["node"]["xy"] = xy3b
            new_data_2d[reach_number][unit_counter_3]["node"]["data"] = pd.DataFrame(datanode3, columns=hdf5_1.data_2d[reach_number][unit_number]["node"]["data"].columns)

    # hvum copy
    new_data_2d.hvum = hdf5_1.data_2d.hvum
    # delta_level
    new_data_2d.hvum.delta_level.position = "mesh"
    new_data_2d.hvum.delta_level.hdf5 = True
    new_data_2d.hvum.hdf5_and_computable_list.append(new_data_2d.hvum.delta_level)
    # hrr
    new_data_2d.hvum.hrr.position = "mesh"
    new_data_2d.hvum.hrr.hdf5 = True
    new_data_2d.hvum.hdf5_and_computable_list.append(new_data_2d.hvum.hrr)

    # compute area  # TODO: get original areas
    new_data_2d.compute_variables([new_data_2d.hvum.area])
    # get_dimension
    new_data_2d.get_dimension()
    # export new hdf5
    hdf5 = Hdf5Management(path_prj, hdf5_1.filename[:-4] + "_HRR" + hdf5_1.extension, new=True)
    # HYD
    new_data_2d.unit_list = unit_list  # update
    # new_data_2d.path_filename_source = hdf5_1.data_2d.path_filename_source
    # new_data_2d.hyd_unit_correspondence = hdf5_1.data_2d.hyd_unit_correspondence
    # new_data_2d.hyd_model_type = hdf5_1.data_2d.hyd_model_type
    hdf5.create_hdf5_hyd(new_data_2d,
                         hdf5_1.data_2d_whole,
                         project_properties)

    # warnings
    if not print_cmd:
        sys.stdout = sys.__stdout__
        if q:
            q.put(mystdout)
            sleep(0.1)  # to wait q.put() ..

    # prog
    progress_value.value = 100.0
예제 #12
0
def mesh_manager(mesh_manager_description,
                 progress_value,
                 q=None,
                 print_cmd=False,
                 project_properties=None):
    """
    Remove (or keep only) selected mesh cells of an existing .hyd/.hab file
    and export the result to a new hdf5 file suffixed "_MM".

    :param mesh_manager_description: dict describing the operation. Keys read
        here: "hdf5_name", "header" (contains "eliminate" or "keep", and
        either "hydraulic_class" or "cell_index"), "mesh_manager_data" and,
        for the "cell_index" case, "reach_index"/"unit_index" lists.
    :param progress_value: shared value (e.g. multiprocessing.Value) updated
        from 10 to 100 as the computation progresses.
    :param q: optional queue; when print_cmd is False, the captured stdout is
        put on it so the GUI can display the log.
    :param print_cmd: True to print to the console, False to redirect stdout
        to a StringIO buffer (GUI mode).
    :param project_properties: project options dict; "path_prj" is read here.
    """
    # mutable-default fix: use None sentinels instead of shared [] / {}
    if q is None:
        q = []
    if project_properties is None:
        project_properties = {}

    if not print_cmd:
        sys.stdout = mystdout = StringIO()

    def _flush_log():
        # Restore stdout and forward the captured log to the GUI queue
        # (previously copy-pasted at every early return).
        if not print_cmd:
            sys.stdout = sys.__stdout__
            if q:
                q.put(mystdout)
                sleep(0.1)  # to wait q.put() ..

    # progress
    progress_value.value = 10

    # load file
    hdf5_original = Hdf5Management(project_properties["path_prj"],
                                   mesh_manager_description["hdf5_name"],
                                   new=False,
                                   edit=False)
    hdf5_original.load_hdf5(user_target_list="all", whole_profil=True)

    # index case: eliminate the selected cells, or keep only them?
    eliminate = False
    if "eliminate" in mesh_manager_description["header"]:
        eliminate = True
    elif "keep" in mesh_manager_description["header"]:
        eliminate = False

    # init
    reach_index = []
    unit_index = []

    hydraulic_class = False
    if "hydraulic_class" in mesh_manager_description["header"]:
        hydraulic_class = True
        # hydraulic_class mode processes every reach/unit of the file
        for reach_number in range(len(hdf5_original.data_2d)):
            for unit_number in range(len(hdf5_original.data_2d[reach_number])):
                reach_index.append(reach_number)
                unit_index.append(unit_number)
        # check if hs_mesh (hydrosignature mesh)
        if not hdf5_original.hs_mesh:
            print("Error: " + mesh_manager_description["header"] +
                  " is not possible on " + hdf5_original.filename +
                  ". The latter is not a 2d mesh from hydrosignature.")
            _flush_log()
            return

    elif "cell_index" in mesh_manager_description["header"]:
        hydraulic_class = False
        # check if reach_index and unit_index for each row of mesh_manager file exist in hdf5 file
        for mm_row_index in range(len(mesh_manager_description["reach_index"])):
            try:
                hdf5_original.data_2d[
                    mesh_manager_description["reach_index"][mm_row_index]]
            # BUGFIX: was `except TypeError or IndexError`, which evaluates to
            # `except TypeError` only (the `or` returns its first truthy
            # operand) — IndexError was never caught. A tuple is required.
            except (TypeError, IndexError):
                print("Error: specified reach_index (" + str(
                    mesh_manager_description["reach_index"][mm_row_index]) +
                      ") not exist in " + hdf5_original.filename)
                _flush_log()
                return
            try:
                hdf5_original.data_2d[
                    mesh_manager_description["reach_index"][mm_row_index]][
                        mesh_manager_description["unit_index"][mm_row_index]]
            except IndexError:
                print(
                    "Error: specified unit_index (" +
                    str(mesh_manager_description["unit_index"][mm_row_index]) +
                    ") not exist in selected reach_index (" +
                    str(mesh_manager_description["reach_index"]
                        [mm_row_index]) + ") in " + hdf5_original.filename)
                _flush_log()
                return
        reach_index = mesh_manager_description["reach_index"]
        unit_index = mesh_manager_description["unit_index"]

    # progress (guard against empty selection to avoid ZeroDivisionError when
    # the header matches neither "hydraulic_class" nor "cell_index")
    delta_row = 80 / len(reach_index) if reach_index else 80

    animal_variable_list = \
        hdf5_original.data_2d.hvum.hdf5_and_computable_list.habs()

    for mm_row_index in range(len(reach_index)):
        reach_number = reach_index[mm_row_index]
        unit_number = unit_index[mm_row_index]
        # same object reference as the original chained indexing (mutations
        # below go through this reference)
        unit_data = hdf5_original.data_2d[reach_number][unit_number]

        # get cell_index
        if hydraulic_class:
            cell_array_bool = np.in1d(
                unit_data["mesh"]["data"][
                    hdf5_original.data_2d.hvum.hydraulic_class.name],
                mesh_manager_description["mesh_manager_data"])
            cell_index = np.argwhere(cell_array_bool).flatten().tolist()
        else:
            cell_index = \
                mesh_manager_description["mesh_manager_data"][mm_row_index]

        # True when the selection covers every cell of the unit
        same_len = len(unit_data["mesh"]["tin"]) == len(cell_index)

        # change data
        if eliminate:  # eliminate
            if same_len:
                print("Warning: All cell of unit " +
                      unit_data.unit_name + " are removed.")
            unit_data["mesh"]["tin"] = np.delete(
                unit_data["mesh"]["tin"], cell_index, 0)
            unit_data["mesh"]["i_whole_profile"] = np.delete(
                unit_data["mesh"]["i_whole_profile"], cell_index, 0)
            unit_data["mesh"]["data"] = \
                unit_data["mesh"]["data"].drop(cell_index)
        else:  # keep
            if same_len:
                print("Warning: All selected cell of unit " +
                      unit_data.unit_name + " are keep. Nothing happen.")
            unit_data["mesh"]["tin"] = unit_data["mesh"]["tin"][cell_index]
            unit_data["mesh"]["i_whole_profile"] = \
                unit_data["mesh"]["i_whole_profile"][cell_index]
            unit_data["mesh"]["data"] = \
                unit_data["mesh"]["data"].iloc[cell_index]

        # remove_unused_node
        unit_data.remove_unused_node()

        # refresh hsi summary (WUA/OSI per animal) for the reduced mesh
        if animal_variable_list:
            for animal_index, animal_variable in enumerate(
                    animal_variable_list):
                area = unit_data["mesh"]["data"][
                    hdf5_original.data_2d.hvum.area.name]
                hsi = unit_data["mesh"]["data"][animal_variable.name]
                unit_data.total_wet_area = np.sum(area)
                # compute summary
                wua = np.nansum(hsi * area)
                if any(np.isnan(hsi)):
                    # restrict the area to cells with a known hsi value
                    area = np.sum(
                        unit_data["mesh"]["data"][
                            hdf5_original.data_2d.hvum.area.name]
                        [~np.isnan(hsi)])
                    # osi = wua / area
                    percent_area_unknown = (
                        1 - (area / unit_data.total_wet_area)
                    ) * 100  # next to 1 in top quality, next to 0 is bad or EVIL !
                else:
                    percent_area_unknown = 0.0
                osi = wua / unit_data.total_wet_area
                # save data
                animal_variable_list[animal_index].wua[reach_number][
                    unit_number] = wua
                animal_variable_list[animal_index].osi[reach_number][
                    unit_number] = osi
                animal_variable_list[animal_index].percent_area_unknown[
                    reach_number][unit_number] = percent_area_unknown

        # progress
        progress_value.value = progress_value.value + delta_row

    # prog
    progress_value.value = 90.0

    # keep the non-habitat variables, then re-append the refreshed animals
    hdf5_original.data_2d.hvum.hdf5_and_computable_list = \
        hdf5_original.data_2d.hvum.hdf5_and_computable_list.no_habs()
    hdf5_original.data_2d.hvum.hdf5_and_computable_list.extend(
        animal_variable_list)

    # new_filename
    new_filename = hdf5_original.filename[:-4] + "_MM" + hdf5_original.extension

    # record the operation description as mm_* attributes on data_2d
    for key, value in mesh_manager_description.items():
        if key not in ("hdf5_name", "hdf5_name_list"):
            setattr(hdf5_original.data_2d, "mm_" + key, value)

    # get_dimension
    hdf5_original.data_2d.get_dimension()

    # export hdf5_new
    hdf5_new = Hdf5Management(project_properties["path_prj"],
                              new_filename,
                              new=True)
    if hdf5_original.extension == ".hyd":
        hdf5_new.create_hdf5_hyd(hdf5_original.data_2d,
                                 hdf5_original.data_2d_whole,
                                 project_properties)
    else:
        hdf5_new.create_hdf5_hab(hdf5_original.data_2d,
                                 hdf5_original.data_2d_whole,
                                 project_properties)  # remove_fish_hab

    # write hydrosignature to new file
    if hdf5_new.hs_calculated:
        hdf5_original = Hdf5Management(project_properties["path_prj"],
                                       hdf5_original.data_2d.filename,
                                       new=False,
                                       edit=False)
        hdf5_original.load_hydrosignature()
        # TODO: change new file hydrosignature
        # set to new file
        hdf5_new = Hdf5Management(project_properties["path_prj"],
                                  hdf5_new.filename,
                                  new=False,
                                  edit=True)
        hdf5_new.get_hdf5_attributes(close_file=False)
        hdf5_new.load_units_index()
        hdf5_new.load_data_2d()
        hdf5_new.load_whole_profile()
        hdf5_new.data_2d = hdf5_original.data_2d
        hdf5_new.write_hydrosignature(hs_export_mesh=hdf5_original.hs_mesh)
        hdf5_new.close_file()

    # warnings
    _flush_log()

    # prog
    progress_value.value = 100.0
예제 #13
0
def calc_hab_and_output(hab_filename,
                        animal_variable_list,
                        progress_value,
                        q=None,
                        print_cmd=False,
                        project_properties={} if False else None):
    """
    Compute the habitat suitability index (HSI), WUA and OSI for each animal
    model of *animal_variable_list* on the 2d data of a .hab file, save them
    back into the file and produce the OSI/WUA text export. This function is
    called by calc_hab_GUI.py on a second thread to minimize the freezing of
    the GUI.

    :param hab_filename: name of the .hab hdf5 file (located in the project
        "hdf5" folder) holding the merged hydraulic + substrate results.
    :param animal_variable_list: list of animal variables (bio models) to
        compute; each carries pref_file, stage, model_type, aquatic_animal_type,
        hyd_opt and sub_opt, and accumulates wua/osi/percent_area_unknown.
    :param progress_value: shared value updated from 10 to 100 as the
        computation progresses.
    :param q: optional queue used by the second thread; when print_cmd is
        False the captured stdout is put on it for the GUI log.
    :param print_cmd: if True the print command is directed in the cmd, False
        if directed to the GUI.
    :param project_properties: project options dict ('path_prj' and the
        export flags are read here).

    ** Technical comments**

    This function redirects sys.stdout. The point of doing this is because
    this function will be called by the GUI or by the cmd. If it is called by
    the GUI, we want the output to be redirected to the window for the log
    under HABBY. If it is called by the cmd, we want the print function to be
    sent to the command line. We make the switch here.
    """
    # mutable-default fix: use None sentinels instead of shared [] / {}
    if q is None:
        q = []
    if project_properties is None:
        project_properties = {}

    # print output
    if not print_cmd:
        sys.stdout = mystdout = StringIO()

    # get translation
    qt_tr = get_translator(project_properties['path_prj'])

    # progress
    progress_value.value = 10

    # if exists
    if not os.path.exists(
            os.path.join(project_properties['path_prj'], "hdf5",
                         hab_filename)):
        print('Error: ' + qt_tr.translate(
            "calcul_hab_mod", "The specified file : " + hab_filename +
            " don't exist."))
        # warnings
        if not print_cmd:
            sys.stdout = sys.__stdout__
            if q:
                q.put(mystdout)
                sleep(0.1)  # to wait q.put() ..
        return

    # load data and get variable to compute
    # NOTE(review): dirname(join(path_prj, "hdf5")) is path_prj itself — kept
    # as-is; confirm against Hdf5Management's expected path convention.
    hdf5_path = os.path.dirname(
        os.path.join(project_properties['path_prj'], "hdf5"))
    hdf5 = Hdf5Management(hdf5_path, hab_filename, new=False, edit=True)
    hdf5.load_hdf5(user_target_list=animal_variable_list)

    # progress
    delta_animal = 80 / len(animal_variable_list)

    # for each animal
    for animal in animal_variable_list:
        """ get bio model """
        # load bio data
        information_model_dict = read_pref(animal.pref_file)
        # search stage
        stage_index = None
        for i, stade_bio in enumerate(
                information_model_dict["stage_and_size"]):
            if animal.stage == stade_bio:
                stage_index = i
        # NOTE(review): if the stage is never matched, stage_index stays None
        # and the indexing below raises TypeError — assumed validated upstream.

        # model_var
        model_var = information_model_dict["hab_variable_list"][stage_index]

        # substrate_classification_code of the 2d data (first sub variable)
        data_2d_sub_classification_code = hdf5.data_2d.hvum.hdf5_and_computable_list.hdf5s(
        ).subs()[0].unit

        if animal.model_type == 'univariate suitability index curves':
            if "HEM" in information_model_dict["hydraulic_type_available"][
                    stage_index]:
                pref_hem_data = model_var.variable_list.get_from_name(
                    hdf5.data_2d.hvum.shear_stress.name).data
                # open-ended last class: extend the curve to a huge value
                if pref_hem_data[0][-1] == 0:
                    pref_hem_data[0][-1] = 1000
            else:
                if hdf5.data_2d.hvum.h.name in model_var.variable_list.names():
                    pref_height = model_var.variable_list.get_from_name(
                        hdf5.data_2d.hvum.h.name).data
                    # if the last value ends in 0 then change the corresponding value to x at 100 m
                    if pref_height[1][-1] == 0:
                        pref_height[0].append(1000)
                        pref_height[1].append(0)
                if hdf5.data_2d.hvum.v.name in model_var.variable_list.names():
                    pref_vel = model_var.variable_list.get_from_name(
                        hdf5.data_2d.hvum.v.name).data
                    # if the last value ends in 0 then change the corresponding value to x at 100 m
                    if pref_vel[1][-1] == 0:
                        pref_vel[0].append(100)
                        pref_vel[1].append(0)
                if model_var.variable_list.subs():
                    hsi_sub_classification_code = model_var.variable_list.get_from_name(
                        model_var.variable_list.subs()[0].name).unit
                    pref_sub = np.array(
                        model_var.variable_list.get_from_name(
                            model_var.variable_list.subs()[0].name).data)
                    if hsi_sub_classification_code == "Sandre" and data_2d_sub_classification_code == "Cemagref":
                        # convert substrate hsi to Cemagref
                        pref_sub = sandre_to_cemagref_by_percentage_array(
                            pref_sub)

        else:  # bivariate
            pref_height = model_var.variable_list.get_from_name(
                hdf5.data_2d.hvum.h.name).data
            pref_vel = model_var.variable_list.get_from_name(
                hdf5.data_2d.hvum.v.name).data

        # progress
        delta_reach = delta_animal / hdf5.data_2d.reach_number

        # for each reach
        for reach_number in range(hdf5.data_2d.reach_number):
            warning_shearstress_list = []
            warning_range_list = []
            # progress
            delta_unit = delta_reach / hdf5.data_2d[reach_number].unit_number

            # for each unit
            for unit_number in range(hdf5.data_2d[reach_number].unit_number):
                """ get 2d data """
                height_t = hdf5.data_2d[reach_number][unit_number]["mesh"][
                    "data"][hdf5.data_2d.hvum.h.name].to_numpy()
                vel_t = hdf5.data_2d[reach_number][unit_number]["mesh"][
                    "data"][hdf5.data_2d.hvum.v.name].to_numpy()

                if animal.aquatic_animal_type in {
                        "invertebrate"
                } and "HEM" in information_model_dict[
                        "hydraulic_type_available"][stage_index]:
                    shear_stress_t = hdf5.data_2d[reach_number][unit_number][
                        "mesh"]["data"][
                            hdf5.data_2d.hvum.shear_stress.name].to_numpy()
                ikle_t = hdf5.data_2d[reach_number][unit_number]["mesh"]["tin"]
                area = hdf5.data_2d[reach_number][unit_number]["mesh"]["data"][
                    hdf5.data_2d.hvum.area.name]
                """ compute habitat """
                # univariate
                if animal.model_type == 'univariate suitability index curves':
                    if "HEM" in information_model_dict[
                            "hydraulic_type_available"][stage_index]:
                        """ HEM pref """
                        # get pref x and y
                        pref_shearstress = pref_hem_data[0]
                        pref_values = pref_hem_data[2]
                        # interp1d(...... kind='previous') for values <0.0771
                        pref_shearstress = [0.0] + pref_shearstress
                        pref_values = pref_values + [pref_values[-1]]
                        # check range suitability VS range input data
                        if max(pref_shearstress) < np.nanmax(shear_stress_t):
                            warning_range_list.append(unit_number)
                        # hem_interp_function
                        hem_interp_f = interp1d(pref_shearstress,
                                                pref_values,
                                                kind='previous',
                                                bounds_error=False,
                                                fill_value=np.nan)
                        with np.errstate(divide='ignore', invalid='ignore'):
                            hsi = hem_interp_f(shear_stress_t.flatten())
                        if any(np.isnan(shear_stress_t)):
                            warning_shearstress_list.append(unit_number)
                    else:
                        """ hydraulic pref """
                        if animal.hyd_opt in ["HV", "H"]:  # get H pref value
                            if max(pref_height[0]) < height_t.max(
                            ):  # check range suitability VS range input data
                                warning_range_list.append(unit_number)
                            h_pref_c = np.interp(height_t,
                                                 pref_height[0],
                                                 pref_height[1],
                                                 left=np.nan,
                                                 right=np.nan)
                        if animal.hyd_opt in ["HV", "V"]:  # get V pref value
                            if max(pref_vel[0]) < vel_t.max(
                            ):  # check range suitability VS range input data
                                warning_range_list.append(unit_number)
                            v_pref_c = np.interp(vel_t,
                                                 pref_vel[0],
                                                 pref_vel[1],
                                                 left=np.nan,
                                                 right=np.nan)
                        """ substrate pref """
                        # Neglect
                        if animal.sub_opt == "Neglect":
                            s_pref_c = np.array([1] * ikle_t.shape[0])
                        else:
                            # concat substrate data_2d into one numpy array
                            sub_t = np.empty(shape=(
                                ikle_t.shape[0],
                                len(hdf5.data_2d.hvum.hdf5_and_computable_list.
                                    hdf5s().subs().names())),
                                             dtype=np.int64)
                            for sub_class_num, sub_class_name in enumerate(
                                    hdf5.data_2d.hvum.hdf5_and_computable_list.
                                    hdf5s().subs().names()):
                                sub_t[:, sub_class_num] = hdf5.data_2d[
                                    reach_number][unit_number]["mesh"]["data"][
                                        sub_class_name]

                            # sub_classification_code conversion ?
                            if data_2d_sub_classification_code == "Sandre" and hsi_sub_classification_code == "Cemagref":
                                # convert substrate data_2d to Cemagref
                                if len(hdf5.data_2d.hvum.
                                       hdf5_and_computable_list.hdf5s().subs(
                                       )) > 2:  # percentage
                                    sub_t = sandre_to_cemagref_by_percentage_array(
                                        sub_t)
                                else:
                                    sub_t = sandre_to_cemagref_array(sub_t)

                            # Coarser-Dominant
                            if animal.sub_opt == "Coarser-Dominant":
                                if hdf5.data_2d.sub_classification_method == "percentage":
                                    s_pref_c_coarser = pref_substrate_coarser_from_percentage_description(
                                        pref_sub[1], sub_t)
                                    s_pref_c_dom = pref_substrate_dominant_from_percentage_description(
                                        pref_sub[1], sub_t)
                                    s_pref_c = (0.2 * s_pref_c_coarser) + (
                                        0.8 * s_pref_c_dom)
                                elif hdf5.data_2d.sub_classification_method == "coarser-dominant":
                                    s_pref_c_coarser = pref_sub[1][sub_t[:, 0]
                                                                   - 1]
                                    s_pref_c_dom = pref_sub[1][sub_t[:, 1] - 1]
                                    s_pref_c = (0.2 * s_pref_c_coarser) + (
                                        0.8 * s_pref_c_dom)
                            # Coarser
                            elif animal.sub_opt == "Coarser":
                                if hdf5.data_2d.sub_classification_method == "percentage":
                                    s_pref_c = pref_substrate_coarser_from_percentage_description(
                                        pref_sub[1], sub_t)
                                elif hdf5.data_2d.sub_classification_method == "coarser-dominant":
                                    s_pref_c = pref_sub[1][sub_t[:, 0] - 1]
                            # Dominant
                            elif animal.sub_opt == "Dominant":
                                if hdf5.data_2d.sub_classification_method == "percentage":
                                    s_pref_c = pref_substrate_dominant_from_percentage_description(
                                        pref_sub[1], sub_t)
                                elif hdf5.data_2d.sub_classification_method == "coarser-dominant":
                                    s_pref_c = pref_sub[1][sub_t[:, 1] - 1]
                            # Percentage
                            elif animal.sub_opt == "Percentage":
                                if hdf5.data_2d.sub_classification_method == "percentage":
                                    s_pref_c = np.sum(
                                        (sub_t / 100) * pref_sub[1], axis=1)
                                elif hdf5.data_2d.sub_classification_method == "coarser-dominant":
                                    s_pref_c_coarser = pref_sub[1][sub_t[:, 0]
                                                                   - 1]
                                    s_pref_c_dom = pref_sub[1][sub_t[:, 1] - 1]
                                    s_pref_c = (0.2 * s_pref_c_coarser) + (
                                        0.8 * s_pref_c_dom)
                            else:
                                print("Error: animal.sub_opt", animal.sub_opt,
                                      " not recognized.")
                        """ compute habitat value """
                        try:
                            # HV
                            if "H" in animal.hyd_opt and "V" in animal.hyd_opt:
                                hsi = h_pref_c * v_pref_c * s_pref_c
                                hsi[h_pref_c == 0] = 0
                                hsi[v_pref_c == 0] = 0
                                hsi[s_pref_c == 0] = 0
                            # H
                            elif "H" in animal.hyd_opt:
                                hsi = h_pref_c * s_pref_c
                                hsi[h_pref_c == 0] = 0
                                hsi[s_pref_c == 0] = 0
                            # V
                            elif "V" in animal.hyd_opt:
                                hsi = v_pref_c * s_pref_c
                                hsi[v_pref_c == 0] = 0
                                hsi[s_pref_c == 0] = 0
                            # Neglect
                            else:
                                hsi = s_pref_c
                        except ValueError:
                            print('Error: ' + qt_tr.translate(
                                "calcul_hab_mod",
                                'One time step misses substrate, velocity or water height value.'
                            ))
                            hsi = [-99]

                # bivariate suitability index models
                else:
                    # height data
                    if max(pref_height) < height_t.max(
                    ):  # check range suitability VS range input data
                        warning_range_list.append(unit_number)
                    # velocity data
                    if max(pref_vel) < vel_t.max(
                    ):  # check range suitability VS range input data
                        warning_range_list.append(unit_number)

                    # prep data
                    pref_vel = np.array(pref_vel)
                    pref_height = np.array(pref_height)
                    pref_xy_repeated = []
                    for row in range(len(pref_height)):
                        x_coord = np.repeat(pref_height[row], len(pref_vel))
                        y_coord = pref_vel
                        pref_xy_repeated.extend(list(zip(x_coord, y_coord)))
                    pref_xy_repeated = np.array(pref_xy_repeated)
                    xy_input = np.dstack((vel_t, height_t))

                    # calc from model points
                    hsi = griddata(pref_xy_repeated,
                                   model_var.hsi_model_data,
                                   xy_input,
                                   method='linear')[0]

                # compute summary
                wua = np.nansum(hsi * area)
                if any(np.isnan(hsi)):
                    area = np.sum(
                        hdf5.data_2d[reach_number][unit_number]["mesh"]["data"]
                        [hdf5.data_2d.hvum.area.name][~np.isnan(hsi)])
                    # osi = wua / area
                    percent_area_unknown = (
                        1 - (area / hdf5.data_2d[reach_number]
                             [unit_number].total_wet_area)
                    ) * 100  # next to 1 in top quality, next to 0 is bad or EVIL !
                else:
                    percent_area_unknown = 0.0
                osi = wua / hdf5.data_2d[reach_number][
                    unit_number].total_wet_area

                # get data
                hdf5.data_2d[reach_number][unit_number]["mesh"]["data"][
                    animal.name] = hsi
                if len(animal.wua) < hdf5.data_2d.reach_number:
                    animal.wua.append([])
                    animal.osi.append([])
                    animal.percent_area_unknown.append([])
                animal.wua[reach_number].append(wua)
                animal.osi[reach_number].append(osi)
                animal.percent_area_unknown[reach_number].append(
                    percent_area_unknown)

                # progress
                progress_value.value = int(progress_value.value + delta_unit)

            # WARNINGS
            if warning_range_list:
                warning_range_list = list(set(warning_range_list))
                warning_range_list.sort()
                # get unit name
                unit_names = []
                for warning_unit_num in warning_range_list:
                    unit_names.append(
                        hdf5.data_2d.unit_list[reach_number][warning_unit_num])
                # plain literal: was an f-string with no placeholder
                print("Warning: " + qt_tr.translate(
                    "calcul_hab_mod", "Unknown habitat values produced for "
                ) + model_var.name + qt_tr.translate(
                    "calcul_hab_mod",
                    ", his suitability curve range is not sufficient according to the hydraulics of unit(s) : "
                ) + ", ".join(str(x) for x in unit_names) +
                      qt_tr.translate("calcul_hab_mod", " of reach : ") +
                      hdf5.data_2d.reach_list[reach_number])
            # WARNINGS HEM
            if animal.aquatic_animal_type in {"invertebrate"}:
                if warning_shearstress_list:
                    warning_shearstress_list.sort()
                    # get unit name
                    unit_names = []
                    for warning_unit_num in warning_shearstress_list:
                        unit_names.append(hdf5.data_2d.unit_list[reach_number]
                                          [warning_unit_num])
                    # plain literal: was an f-string with no placeholder
                    print("Warning: " + qt_tr.translate(
                        "calcul_hab_mod",
                        "Unknown habitat values produced for "
                    ) + model_var.name + " " + animal.stage + qt_tr.translate(
                        "calcul_hab_mod",
                        ", the shear stress data present unknown values in unit(s) : "
                    ) + ", ".join(str(x) for x in unit_names) +
                          qt_tr.translate("calcul_hab_mod", " of reach : ") +
                          hdf5.data_2d.reach_list[reach_number])

    # progress
    progress_value.value = 90

    # saving hdf5 data of the habitat value
    hdf5.add_fish_hab(animal_variable_list)

    # copy_or_not_user_pref_curve_to_input_folder
    for animal2 in animal_variable_list:
        copy_or_not_user_pref_curve_to_input_folder(animal2,
                                                    project_properties)

    # export
    # NOTE(review): export_dict and nb_export are built but never used below;
    # kept because building them reads project_properties[key] (side effect:
    # KeyError surfaces misconfiguration). Confirm whether dead or pending.
    export_dict = dict()
    nb_export = 0
    for key in hdf5.available_export_list:
        if project_properties[key][1]:
            nb_export += 1
        export_dict[key + "_" +
                    hdf5.extension[1:]] = project_properties[key][1]

    # export_osi_wua_txt
    hdf5.export_osi_wua_txt()

    # warnings
    if not print_cmd:
        sys.stdout = sys.__stdout__
        if q:
            q.put(mystdout)
            sleep(1)  # to wait q.put() ..

    # prog
    progress_value.value = 100.0