Example #1
def download_lidar_dataset(dir_path, tile_list):
    """Download City of Vancouver LiDAR data"""
    print("Beginning file download with urllib2...")
    for tile in tile_list:
        src_url = (configure.get("Test", "src_url") + tile +
                   configure.get("Constants", "zip_ext"))
        dst_file = dir_path + tile + configure.get("Constants", "zip_ext")
        print("Downloading %s ..." % (configure.get("Test", "src_url") + tile +
                                      configure.get("Constants", "zip_ext")))
        download_url(src_url, dst_file)
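The download_url helper is referenced but not shown in this example; a minimal sketch of what such a helper might look like, assuming a plain HTTP fetch with urllib.request (the project's actual implementation may differ):

import urllib.request

def download_url(src_url, dst_file):
    """Download src_url and write it to dst_file (illustrative sketch only)."""
    with urllib.request.urlopen(src_url) as response, open(dst_file, "wb") as out:
        out.write(response.read())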
Example #2
    def __process_test_labelled_data(self):
        self.statusBar().showMessage("Processing...")
        # check if the testing data is downloaded
        self.__check_test_data()

        self.timer.restart()

        self.labelled_pipeline = ProcessingPipeline(notebook=True)
        self.labelled_pipeline.pre_process_las_files(
            configure["Test"]["dest_dir_path"])
        points_x, points_y = self.labelled_pipeline.collect_points_from_map()
        whole_campus_polygon_features = self.labelled_pipeline.extract_polygon_features(
            points_x, points_y, callback=self.__set_progressbar_value)
        self.labelled_pipeline.export_polygon_features_to_file(
            configure.get("Test", "output_map_file_path"),
            whole_campus_polygon_features,
        )

        points = np.vstack((points_x, points_y)).T

        start_time = time.perf_counter()
        clustering = DBSCAN(
            eps=configure.getfloat("Parameters", "eps"),
            min_samples=configure.getfloat("Parameters", "min_sample"),
            n_jobs=-1,
        ).fit(points)
        end_time = time.perf_counter()

        self.plotter.plot_path = configure.get("Constants",
                                               "plot_html_file_path")
        self.plotter.x = points_x
        self.plotter.y = points_y
        self.plotter.label = clustering.labels_
        self.plotter.display_2d_labelled_pcd(100000,
                                             save_file=True,
                                             render="png")
        self.webEngineView.reload()
        self.webEngineView.setUrl(
            QUrl("file://" + os.path.abspath(
                configure.get("Constants", "plot_html_file_path"))))

        self.__test_output_update(
            cluster_time=(end_time - start_time),
            number_of_clusters=np.amax(clustering.labels_),
            total_time=self.timer.elapsed(),
        )
        data_path = configure.get("Download", "dest_dir_path")
        # estimate the full-dataset processing time by scaling the test run
        # by the number of LAS files found in the download folder
        self.__process_output_update(
            self.timer.elapsed() *
            self.labelled_pipeline.pre_processor.collect_las_file_from_folder(
                new_path=data_path),
            estimated=True)

        self.statusBar().showMessage("Done")
Example #3
    def __on_click_apply(self):
        """Run the processing pipeline for the currently selected tab."""
        if self.tabWidget.currentIndex() == 0:
            self.statusBar().showMessage("Started data processing ...")

            self.timer.restart()

            data_path = configure["Download"]["dest_dir_path"]
            self.labelled_pipeline = ProcessingPipeline(notebook=True)
            if self.labelled_parameter_update:
                self.labelled_pipeline.reload = True
                self.labelled_parameter_update = False
            self.labelled_pipeline.pre_process_las_files(data_path)
            self.labelled_pipeline.collect_points_from_map()
            whole_campus_polygon_features = self.labelled_pipeline.extract_polygon_features(
                callback=self.__set_progressbar_value)
            self.labelled_pipeline.export_polygon_features_to_file(
                configure.get("Constants", "OUTPUT_MAP_FILE_PATH"),
                whole_campus_polygon_features,
            )

            self.__process_output_update(self.timer.elapsed(), estimated=False)
            self.statusBar().showMessage("Complete data processing")
        elif self.tabWidget.currentIndex() == 1:
            pass
Example #4
    def __init__(self, data_dir):
        self.data_dir = data_dir
        self.lasfile_list = None
        self.min_east = None  # the westernmost tile
        self.min_north = None  # the southernmost tile
        self.min_filepath = None

        # check the folder structures
        Path(configure.get("Constants",
                           "data_folder_path")).mkdir(parents=True,
                                                      exist_ok=True)
        Path(configure.get("Constants",
                           "tests_folder_path")).mkdir(parents=True,
                                                       exist_ok=True)

        self.collect_las_file_from_folder()

        self.__find_the_corner_tile()
Example #5
def main():
    pipeline = ProcessingPipeline(notebook=False)
    pipeline.pre_process_las_files(configure["Download"]["dest_dir_path"])
    points_x, points_y = pipeline.collect_points_from_map()
    whole_campus_polygon_features = pipeline.extract_polygon_features(
        points_x, points_y)
    pipeline.export_polygon_features_to_file(
        configure.get("Constants", "OUTPUT_MAP_FILE_PATH"),
        whole_campus_polygon_features,
    )
Example #6
def unzip_files(src_dir):
    """Unzip files and then remove the zip files.

    Args:
        src_dir (string): source directory
    """
    for root, dirs, files in os.walk(src_dir):
        for file in files:
            if file.endswith(configure.get("Constants", "zip_ext")):
                print("Extracting " + os.path.join(root, file))
                with zipfile.ZipFile(os.path.join(root, file), "r") as zip_ref:
                    zip_ref.extractall(src_dir)
                os.remove(os.path.join(root, file))
Example #7
    def filter_out_of_campus_points(self, whole_campus_x, whole_campus_y):
        """Pre-processing function to remove all the points that are not
        within campus.

        Args:
            whole_campus_x (np.array): x UTM coordinates as integers (east axis)
            whole_campus_y (np.array): y UTM coordinates as integers (north axis)

        Returns:
            filtered_x, filtered_y: x, y arrays with out-of-campus points removed.
        """
        if configure.getboolean("Configure", "debug"):
            print("filtering points that are not within campus...")

        # open the campus boundary geojson file and read it as a polygon
        df = geopandas.read_file(
            configure.get("Constants", "boundary_geojson_file_path"))

        # translate utm to geographic
        easting = (whole_campus_x / 100.0) + self.min_east * 100
        northing = (whole_campus_y / 100.0) + self.min_north * 100
        raw_geo = utm.to_latlon(easting, northing, 10, "U")

        # map 2d array to shapely points
        stacked = np.vstack((raw_geo[1], raw_geo[0])).T
        s = GeoSeries(map(Point, stacked))

        # query the points for faster processing.
        # according to https://stackoverflow.com/questions/62280398/checking-if-a-point-is-contained-in-a-polygon-multipolygon-for-many-points
        tree = STRtree(s)
        # we have to do a if check after query, according to the post
        res = [
            o for o in tree.query(df.geometry[0]) if df.geometry[0].contains(o)
        ]

        # translate from geographic to utm
        filtered_stacked = np.zeros((len(res), 2))
        for count, point in enumerate(res):
            filtered_stacked[count, :] = np.asarray(point.xy).reshape(1, 2)
        raw_utm = utm.from_latlon(filtered_stacked[:, 1], filtered_stacked[:, 0])
        filtered_x = (raw_utm[0] - self.min_east * 100) * 100
        filtered_y = (raw_utm[1] - self.min_north * 100) * 100
        filtered_x = filtered_x.astype(int)
        filtered_y = filtered_y.astype(int)

        if configure.getboolean("Configure", "debug"):
            print("filtering completed.")

        return filtered_x, filtered_y
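The bounding-box query followed by an explicit contains() check is the pattern from the linked Stack Overflow answer; here is a standalone sketch of the same idea with made-up geometries, assuming Shapely 1.x (where STRtree.query returns geometries rather than indices):

from shapely.geometry import Point, Polygon
from shapely.strtree import STRtree

# toy boundary and candidate points, for illustration only
boundary = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
candidates = [Point(x, y) for x in range(-5, 16) for y in range(-5, 16)]

tree = STRtree(candidates)
# query() only filters by bounding box, so the contains() check is still needed
inside = [p for p in tree.query(boundary) if boundary.contains(p)]
print("points inside:", len(inside))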
Example #8
    def collect_las_file_from_folder(self, new_path=None):
        """Collect all the LAS files from the input folder. It only looks
        for the .las extension.
        """
        self.lasfile_list = []
        path = self.data_dir
        if new_path is not None:
            path = new_path
        for root, dirs, files in os.walk(path):
            for file in files:
                if file.endswith(configure.get("Constants", "las_ext")):
                    file_path = os.path.join(root, file)
                    self.lasfile_list.append(LasFile(file_path))
        if configure.getboolean("Configure", "debug"):
            print("Found a total of %d LAS files." % len(self.lasfile_list))
        return len(self.lasfile_list)
Example #9
    def collect_points_from_map(self):
        """Collect the x/y points for the whole map, either from the cached
        pkl file or by re-reading the pre-processed LAS files.

        Returns:
            whole_campus_x, whole_campus_y: arrays of point coordinates
        """

        if not self.reload and os.path.exists(
            configure.get("Constants", "pkl_file_path")
        ):
            # load the data from pkl if we choose not to reload, and the data file exists
            points = self.load_points_from_pkl()
            self.whole_campus_x = points[:, 0].T
            self.whole_campus_y = points[:, 1].T
            if configure.getboolean("Configure", "debug"):
                print("Loaded points from data file")
        else:
            if configure.getboolean("Configure", "debug"):
                print("Reloading points from LAS files")
            all_point_x = np.array([])
            all_point_y = np.array([])

            # extract all the points from pre-processed files
            for las_file in self.pre_processor.lasfile_list:
                if las_file.valid:
                    all_point_x = np.append(all_point_x, las_file.point_x)
                    all_point_y = np.append(all_point_y, las_file.point_y)

            # remove points that are out of boundary
            self.whole_campus_x, self.whole_campus_y = self.pre_processor.filter_out_of_campus_points(
                all_point_x, all_point_y)
            if configure.getboolean("Configure", "debug"):
                print("Saving points into data file")
            self.save_points_as_pkl(
                np.vstack((self.whole_campus_x, self.whole_campus_y)).T)

            if configure.getboolean("Configure", "debug"):
                print("Reloaded points from LAS file")
        return self.whole_campus_x, self.whole_campus_y
Example #10
    def __on_click_reset(self):
        if self.tabWidget.currentIndex() == 0:
            configure.read(LABELLED_CONFIG_PATH)
            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_down_size").setText(
                    configure.get("Parameters", "down_size"))
            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit,
                "lineEdit_eps").setText(configure.get("Parameters", "eps"))
            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_min_sample").setText(
                    configure.get("Parameters", "min_sample"))

            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_test_dir_path").setText(
                    configure.get("Test", "dest_dir_path"))
            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_data_dir_path").setText(
                    configure.get("Download", "dest_dir_path"))

            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_min_area").setText(
                    configure.get("Parameters", "min_polygon_area"))
            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_alpha").setText(
                    configure.get("Parameters", "alphashape_reduction"))
            self.scrollAreaWidget_lidar_labelled.findChild(
                QLineEdit, "lineEdit_max_area").setText(
                    configure.get("Parameters", "max_polygon_area"))

        elif self.tabWidget.currentIndex() == 1:
            unlabelled_configure.read(UNLABELLED_CONFIG_PATH)
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_uniform_k_points").setText(
                    unlabelled_configure.get("Parameters",
                                             "uniform_down_k_point"))
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_ground_diff").setText(
                    unlabelled_configure.get("Parameters", "ground_threshold"))
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_distance_threshold").setText(
                    unlabelled_configure.get("Parameters",
                                             "distance_threshold"))

            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_eps_unlabel").setText(
                    unlabelled_configure.get("Parameters", "eps"))
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_min_sample_unlabel").setText(
                    unlabelled_configure.get("Parameters", "min_points"))

            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_rgbvi").setText(
                    unlabelled_configure.get("Parameters", "rgbvi_threshold"))
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_data_dir_path_unlabel").setText(
                    unlabelled_configure.get("Test", "las_file_path"))
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_test_dir_path_unlabel").setText(
                    unlabelled_configure.get("Test", "pcd_output_path"))
            self.scrollAreaWidget_lidar_unlabelled.findChild(
                QLineEdit, "lineEdit_output_dir_path_unlabel").setText(
                    unlabelled_configure.get("Test", "las_file_output_path"))

        elif self.tabWidget.currentIndex() == 2:
            orthophoto_configure.read(ORTHOPHOTO_CONFIG_PATH)
            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_stddev").setText(
                    orthophoto_configure.get("Parameters",
                                             "standard_deviation_threshold"))

            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_h_min").setText(
                    orthophoto_configure.get("Parameters", "h_min"))
            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_s_min").setText(
                    orthophoto_configure.get("Parameters", "s_min"))
            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_v_min").setText(
                    orthophoto_configure.get("Parameters", "v_min"))
            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_h_max").setText(
                    orthophoto_configure.get("Parameters", "h_max"))
            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_s_max").setText(
                    orthophoto_configure.get("Parameters", "s_max"))
            self.scrollAreaWidget_orthophoto.findChild(
                QLineEdit, "lineEdit_v_max").setText(
                    orthophoto_configure.get("Parameters", "v_max"))
        self.statusBar().showMessage("Reset all parameters")
Example #11
    def save_points_as_pkl(self, points):
        data = pd.DataFrame(data=points)
        data.to_pickle(configure.get("Constants", "pkl_file_path"),
                       compression="zip")
Example #12
    def load_points_from_pkl(self):
        return pd.read_pickle(
            configure.get("Constants", "pkl_file_path"), compression="zip"
        ).to_numpy()
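Taken together, the two helpers above form a pandas pickle round trip; a self-contained sketch of the same pattern, with a hypothetical file name standing in for pkl_file_path:

import numpy as np
import pandas as pd

points = np.random.rand(100, 2)
pd.DataFrame(data=points).to_pickle("points.pkl.zip", compression="zip")   # save
restored = pd.read_pickle("points.pkl.zip", compression="zip").to_numpy()  # load
assert np.allclose(points, restored)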
Example #13
    def extract_forest_polygons_features(self, point_x, point_y):
        """Extract only the large forest polygons from the whole map, using
        manually tuned parameters.

        Args:
            point_x (np.array): points in relative x frame
            point_y (np.array): points in relative y frame

        Returns:
            polygons, shapely_polygons: geojson polygon features and the
            corresponding shapely polygons
        """
        points = np.vstack((point_x, point_y)).T

        start_time = time.perf_counter()

        # Cluster the points based on parameters
        clustering = DBSCAN(
            eps=configure.getfloat("Parameters", "eps"),
            min_samples=configure.getfloat("Parameters", "min_sample"),
            n_jobs=-1,
        ).fit(points)

        end_time = time.perf_counter()
        self.processing_time += end_time - start_time

        if configure.getboolean("Configure", "debug"):
            print(
                "Clustering took %f seconds, found %d clusters"
                % (self.processing_time, np.amax(clustering.labels_))
            )

        polygons = []

        shapely_polygons = []

        # set up a progress bar

        max_area = 0
        for i in tqdm(np.arange(np.amax(clustering.labels_))):
            x_cluster = points[:, 0][np.where(clustering.labels_ == i)]
            y_cluster = points[:, 1][np.where(clustering.labels_ == i)]
            sample = np.vstack((x_cluster, y_cluster)).T

            if np.unique(sample, axis=0).shape[0] <= configure.getint(
                "Parameters", "min_size"
            ):
                continue

            alpha_opt = configure.getfloat("Constants", "default_alpha_shape")
            alpha_shape = alphashape.alphashape(sample, alpha_opt)

            if alpha_shape.area > max_area:
                max_area = alpha_shape.area
            #                    104204026795
            if alpha_shape.area > 5000000000:
                sample_size = sample.shape[0]
                # 6000
                reduce_to_1000 = (
                    lambda x: int(x) if x <= 500 else reduce_to_1000(x / 10)
                )
                desired_size = reduce_to_1000(sample_size)
                down_sample_index = np.random.choice(
                    np.arange(sample_size), desired_size
                )
                # use optimized alpha shape value
                # TODO: don't use optimize, instead use a pre-defined alpha
                alpha_shape = alphashape.alphashape(sample[down_sample_index])

                # save these polygons to a pkl file

                if alpha_shape.geom_type == configure.get(
                    "Constants", "alpha_shape_multipolygon_type"
                ):
                    # sometimes the alpha shape produces more than one polygon.
                    for each_polygon in alpha_shape:
                        polygons.append(
                            self.__get_polygon_from_feature(each_polygon))
                        shapely_polygons.append(each_polygon)
                elif alpha_shape.geom_type == configure.get(
                    "Constants", "alpha_shape_polygon_type"
                ):
                    polygons.append(
                        self.__get_polygon_from_feature(alpha_shape))
                    shapely_polygons.append(alpha_shape)
        print("max is ", max)
        return polygons, shapely_polygons
Example #14
    def extract_polygon_features(self, point_x=None, point_y=None, callback=None):
        """Extract polygons from the given points.

        Args:
            point_x (np.array): points in relative x frame
            point_y (np.array): points in relative y frame
            callback (callable, optional): progress callback invoked as
                callback(current_index, total_clusters)

        Returns:
            polygons (list): list of geojson polygon features
        """
        points = None
        if point_x is not None and point_y is not None:
            points = np.vstack((point_x, point_y)).T
        else:
            points = np.vstack((self.whole_campus_x, self.whole_campus_y)).T

        start_time = time.perf_counter()

        # Cluster the points based on parameters
        clustering = DBSCAN(
            eps=configure.getfloat("Parameters", "eps"),
            min_samples=configure.getfloat("Parameters", "min_sample"),
            n_jobs=-1,
        ).fit(points)

        end_time = time.perf_counter()
        self.processing_time += end_time - start_time

        if configure.getboolean("Configure", "debug"):
            print(
                "Clustering took %f seconds, found %d clusters"
                % (self.processing_time, np.amax(clustering.labels_))
            )

        polygons = []

        # set up a progress bar
        for i in tqdm(np.arange(np.amax(clustering.labels_))):
            if callback is not None:
                callback(i, np.amax(clustering.labels_))
            x_cluster = points[:, 0][np.where(clustering.labels_ == i)]
            y_cluster = points[:, 1][np.where(clustering.labels_ == i)]
            sample = np.vstack((x_cluster, y_cluster)).T

            alpha_opt = configure.getfloat("Constants", "default_alpha_shape")
            alpha_shape = alphashape.alphashape(sample, alpha_opt)

            # ignore the polygons that are too big
            if alpha_shape.area > configure.getint("Parameters", "max_polygon_area"):
                continue

            # optimize the alpha for polygons in fitting size
            if alpha_shape.area > configure.getint("Parameters", "min_polygon_area"):
                sample_size = sample.shape[0]
                """
                if polygon's area is bigger than an single estimated tree area, that means there are more than one tree in the cluster 
                In this case, we want to use optimized alpha, and downscale the points to speed up the process
                """
                if sample_size > configure.getint("Parameters", "alphashape_reduction"):
                    reduce_shape_size = (
                        lambda x: int(x) if x <= configure.getint(
                            "Parameters", "alphashape_reduction") else reduce_shape_size(x / 10)
                    )
                    desired_size = reduce_shape_size(sample_size)
                    down_sample_index = np.random.choice(
                        np.arange(sample_size), desired_size
                    )
                    # use optimized alpha shape value
                    alpha_shape = alphashape.alphashape(
                        sample[down_sample_index])

            if alpha_shape.geom_type == configure.get(
                "Constants", "alpha_shape_multipolygon_type"
            ):
                # sometimes the alpha shape produces more than one polygon.
                for each_polygon in alpha_shape:
                    polygons.append(
                        self.__get_polygon_from_feature(each_polygon))
            elif alpha_shape.geom_type == configure.get(
                "Constants", "alpha_shape_polygon_type"
            ):
                polygons.append(self.__get_polygon_from_feature(alpha_shape))

        return polygons
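The core of both extraction methods above is the same DBSCAN-then-alphashape loop; below is a minimal standalone sketch of that pattern on synthetic points, with made-up eps, min_samples, and alpha values rather than the project's tuned parameters:

import alphashape
import numpy as np
from sklearn.cluster import DBSCAN

# two synthetic blobs standing in for tree clusters (illustrative only)
rng = np.random.default_rng(0)
points = np.vstack((rng.normal((0, 0), 1.0, (200, 2)),
                    rng.normal((20, 20), 1.0, (200, 2))))

labels = DBSCAN(eps=1.5, min_samples=5, n_jobs=-1).fit(points).labels_
for label in range(np.amax(labels) + 1):
    cluster = points[labels == label]
    hull = alphashape.alphashape(cluster, 0.5)  # fixed alpha for the sketch
    print(label, hull.geom_type, round(hull.area, 2))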
Example #15
    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)
        uic.loadUi("../pipeline.ui", self)
        self.labelled_pipeline = None
        self.unlabelled_pipeline = None
        self.plotter = GraphGUI()
        self.timer = QElapsedTimer()

        # set keyboard shortcut to stop the program.
        self.shortcut_close = QShortcut(QKeySequence("Ctrl+Q"), self)
        self.shortcut_close.activated.connect(self.__close_app)

        self.labelled_parameter_update = False
        self.unlabelled_parameter_update = False

        self.tabWidget.setCurrentIndex(0)
        self.__on_click_reset()
        self.tabWidget.setCurrentIndex(1)
        self.__on_click_reset()
        self.tabWidget.setCurrentIndex(2)
        self.__on_click_reset()

        # labelled lidar pipeline tab
        # default dbscan related parameters
        self.pushButton_down_size_update.clicked.connect(
            self.__on_click_dbscan_update)
        self.pushButton_dbscan_update.clicked.connect(
            self.__on_click_dbscan_update)
        # default alphashape related parameters
        self.pushButton_shape_update.clicked.connect(
            self.__on_click_alphashape_update)

        # unlabelled lidar pipeline tab
        self.pushButton_pre_process_update_unlabel.clicked.connect(
            self.__on_click_unlabelled_pre_process_update)
        self.pushButton_dbscan_update_unlabel.clicked.connect(
            self.__on_click_unlabelled_dbscan_update)
        self.pushButton_rgbvi_update.clicked.connect(
            self.__on_click_unlabelled_rgbvi_update)

        # orthophoto pipeline tab
        self.pushButton_stddev.clicked.connect(
            self.__on_click_orthophoto_stddev)
        self.pushButton_hsv_min_update.clicked.connect(
            self.__on_click_orthophoto_hsv_update)
        self.pushButton_hsv_max_update.clicked.connect(
            self.__on_click_orthophoto_hsv_update)

        # process control buttons: connect callbacks
        self.process_control.button(QDialogButtonBox.Reset).clicked.connect(
            self.__on_click_reset)
        self.process_control.button(QDialogButtonBox.SaveAll).clicked.connect(
            self.__on_click_save_all)
        self.process_control.button(QDialogButtonBox.Apply).clicked.connect(
            self.__on_click_apply)
        self.process_control.button(QDialogButtonBox.Close).clicked.connect(
            self.__close_app)

        # set up default display information
        self.__process_output_update()
        self.__test_output_update()
        self.__tooltip_setup()

        # load the default plot
        self.webEngineView.reload()
        self.webEngineView.setUrl(
            QUrl("file://" + os.path.abspath(
                configure.get("Constants", "sample_plot_html_file_path"))))

        self.tabWidget.setCurrentIndex(0)
        self.statusBar().showMessage("Ready")
        self.show()
Example #16
import json
from download import download_lidar_dataset, unzip_files
from config import configure
from pathlib import Path
Path(configure.get("Download", "dest_dir_path")).mkdir(parents=True,
                                                       exist_ok=True)
download_lidar_dataset(configure.get("Download", "dest_dir_path"),
                       json.loads(configure.get("Download", "tiles")))
unzip_files(configure.get("Download", "dest_dir_path"))
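The configure object imported from config is used throughout these examples but never shown; a plausible minimal sketch, assuming it is a standard configparser.ConfigParser loaded from an INI file (the module layout and file name are assumptions):

# config.py (hypothetical) -- exposes the shared `configure` parser
import configparser

configure = configparser.ConfigParser()
configure.read("config.ini")  # assumed path; sections such as [Download], [Constants]

# usage then mirrors the examples above, e.g.:
# configure.get("Download", "dest_dir_path")
# configure.getfloat("Parameters", "eps")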
Example #17
    def __tooltip_setup(self):
        # labelled tooltips
        self.scrollAreaWidget_lidar_labelled.findChild(
            QLineEdit,
            "lineEdit_eps").setToolTip(configure.get("ToolTips", "eps"))
        self.scrollAreaWidget_lidar_labelled.findChild(
            QLineEdit, "lineEdit_down_size").setToolTip(
                configure.get("ToolTips", "down_size"))
        self.scrollAreaWidget_lidar_labelled.findChild(
            QLineEdit, "lineEdit_min_sample").setToolTip(
                configure.get("ToolTips", "min_sample"))
        self.scrollAreaWidget_lidar_labelled.findChild(
            QLineEdit, "lineEdit_alpha").setToolTip(
                configure.get("ToolTips", "alphashape_reduction"))
        self.scrollAreaWidget_lidar_labelled.findChild(
            QLineEdit, "lineEdit_min_area").setToolTip(
                configure.get("ToolTips", "min_polygon_area"))
        self.scrollAreaWidget_lidar_labelled.findChild(
            QLineEdit, "lineEdit_max_area").setToolTip(
                configure.get("ToolTips", "max_polygon_area"))

        # unlabelled tooltips
        self.scrollAreaWidget_lidar_unlabelled.findChild(
            QLineEdit, "lineEdit_uniform_k_points").setToolTip(
                unlabelled_configure.get("ToolTips", "uniform_down_k_point"))
        self.scrollAreaWidget_lidar_unlabelled.findChild(
            QLineEdit, "lineEdit_distance_threshold").setToolTip(
                unlabelled_configure.get("ToolTips", "distance_threshold"))
        self.scrollAreaWidget_lidar_unlabelled.findChild(
            QLineEdit, "lineEdit_ground_diff").setToolTip(
                unlabelled_configure.get("ToolTips", "ground_threshold"))
        self.scrollAreaWidget_lidar_unlabelled.findChild(
            QLineEdit, "lineEdit_eps_unlabel").setToolTip(
                unlabelled_configure.get("ToolTips", "eps"))
        self.scrollAreaWidget_lidar_unlabelled.findChild(
            QLineEdit, "lineEdit_min_sample_unlabel").setToolTip(
                unlabelled_configure.get("ToolTips", "min_points"))
        self.scrollAreaWidget_lidar_unlabelled.findChild(
            QLineEdit, "lineEdit_rgbvi").setToolTip(
                unlabelled_configure.get("ToolTips", "rgbvi_threshold"))

        # orthophoto tooltips
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_stddev").setToolTip(
                orthophoto_configure.get("ToolTips",
                                         "standard_deviation_threshold"))
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_h_min").setToolTip(
                orthophoto_configure.get("ToolTips", "hsv_min"))
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_s_min").setToolTip(
                orthophoto_configure.get("ToolTips", "hsv_min"))
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_v_min").setToolTip(
                orthophoto_configure.get("ToolTips", "hsv_min"))
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_h_max").setToolTip(
                orthophoto_configure.get("ToolTips", "hsv_max"))
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_s_max").setToolTip(
                orthophoto_configure.get("ToolTips", "hsv_max"))
        self.scrollAreaWidget_orthophoto.findChild(
            QLineEdit, "lineEdit_v_max").setToolTip(
                orthophoto_configure.get("ToolTips", "hsv_max"))

        # general tooltips
        self.label_test_output.setToolTip(
            configure.get("ToolTips", "test_output"))
        self.label_process_output.setToolTip(
            configure.get("ToolTips", "process_output"))