def _decode_tiles(self, tiles_with_encoded_data):
        """
         * Decodes the PBF data from all the specified tiles and reports the progress
         * If a tile is loaded from the cache, the decoded_data is already set and doesn't have to be encoded
        :param tiles_with_encoded_data:
        :return:
        """
        clip_tiles = not self._loading_options["inspection_mode"]
        tiles_with_encoded_data = [(t[0], self._unzip(t[1]), clip_tiles)
                                   for t in tiles_with_encoded_data]

        if can_load_lib():
            decoder_func = decode_tile_native
        else:
            decoder_func = decode_tile_python

        tiles = []

        tile_data_tuples = []

        if len(tiles_with_encoded_data) <= self._nr_tiles_to_process_serial:
            for t in tiles_with_encoded_data:
                tile, decoded_data = decoder_func(t)
                if decoded_data:
                    tile_data_tuples.append((tile, decoded_data))
        else:
            pool = self._get_pool()
            rs = pool.map_async(decoder_func,
                                tiles_with_encoded_data,
                                callback=tile_data_tuples.extend)
            pool.close()
            current_progress = 0
            nr_of_tiles = len(tiles_with_encoded_data)
            self._update_progress(
                max_progress=nr_of_tiles,
                msg="Decoding {} tiles...".format(nr_of_tiles))
            while not rs.ready() and not self.cancel_requested:
                QApplication.processEvents()
                remaining = rs._number_left
                index = nr_of_tiles - remaining
                progress = int(100.0 / nr_of_tiles * (index + 1))
                if progress != current_progress:
                    current_progress = progress
                    self._update_progress(progress=progress)
            if self.cancel_requested:
                pool.terminate()
            pool.join()

        tile_data_tuples = sorted(tile_data_tuples, key=lambda t: t[0].id())
        groups = groupby(tile_data_tuples, lambda t: t[0].id())
        for key, group in groups:
            tile = None
            data = {}
            for t, decoded_data in list(group):
                if not decoded_data:
                    continue

                if not tile:
                    tile = t
                for layer_name in decoded_data:
                    data[layer_name] = decoded_data[layer_name]
            tile.decoded_data = data
            tiles.append(tile)

        info("Decoding finished, {} tiles with data", len(tiles))
        return tiles
    def _load_tiles(self):
        # recreate source to assure the source belongs to the new thread, SQLite3 isn't happy about it otherwise
        self._source = self._create_source(self.connection())

        try:
            if can_load_lib():
                info("Native decoding supported!!!")
            else:
                bits = "32"
                if sys.maxsize > 2**32:
                    bits = "64"
                info("Native decoding not supported: {}, {}bit", sys.platform,
                     bits)

            self._feature_count = 0
            self._all_tiles = []

            bounds = self._loading_options["bounds"]
            clip_tiles = self._loading_options["clip_tiles"]
            max_tiles = self._loading_options["max_tiles"]
            layer_filter = self._loading_options["layer_filter"]
            info("Tile limit enabled: {}", max_tiles is not None
                 and max_tiles > 0)
            self.cancel_requested = False
            self.feature_collections_by_layer_name_and_geotype = {}
            self._update_progress(show_dialog=True)
            self._clip_tiles_at_tile_bounds = clip_tiles

            zoom_level = self._get_clamped_zoom_level()

            all_tiles = get_all_tiles(
                bounds=bounds,
                is_cancel_requested_handler=lambda: self.cancel_requested,
            )
            tiles_to_load = set()
            cached_tiles = []
            tiles_to_ignore = set()
            source_name = self._source.name()
            scheme = self._source.scheme()
            for t in all_tiles:
                if self.cancel_requested or (max_tiles and
                                             len(cached_tiles) >= max_tiles):
                    break

                decoded_data = get_cache_entry(cache_name=source_name,
                                               zoom_level=zoom_level,
                                               x=t[0],
                                               y=t[1])
                if decoded_data:
                    tile = VectorTile(scheme=scheme,
                                      zoom_level=zoom_level,
                                      x=t[0],
                                      y=t[1])
                    tile.decoded_data = decoded_data
                    cached_tiles.append(tile)
                    tiles_to_ignore.add((tile.column, tile.row))
                else:
                    tiles_to_load.add(t)

            remaining_nr_of_tiles = len(tiles_to_load)
            if max_tiles:
                if len(cached_tiles) + len(tiles_to_load) >= max_tiles:
                    remaining_nr_of_tiles = clamp(max_tiles -
                                                  len(cached_tiles),
                                                  low=0)
            info("{} tiles in cache. Max. {} will be loaded additionally.",
                 len(cached_tiles), remaining_nr_of_tiles)
            if len(cached_tiles) > 0:
                if not self.cancel_requested:
                    self._process_tiles(cached_tiles, layer_filter)
                    self._all_tiles.extend(cached_tiles)

            debug("Loading data for zoom level '{}' source '{}'", zoom_level,
                  self._source.name())

            if remaining_nr_of_tiles:
                tile_data_tuples = self._source.load_tiles(
                    zoom_level=zoom_level,
                    tiles_to_load=tiles_to_load,
                    max_tiles=remaining_nr_of_tiles)
                if len(tile_data_tuples) > 0 and not self.cancel_requested:
                    tiles = self._decode_tiles(tile_data_tuples)
                    self._process_tiles(tiles, layer_filter)
                    for t in tiles:
                        cache_tile(cache_name=source_name,
                                   zoom_level=zoom_level,
                                   x=t.column,
                                   y=t.row,
                                   decoded_data=t.decoded_data)
                    self._all_tiles.extend(tiles)
            self._continue_loading()

        except Exception as e:
            tb = ""
            if traceback:
                tb = traceback.format_exc()
            critical("An exception occured: {}, {}", e, tb)
            self.cancelled.emit()