    def test_is_wgs84_crs(self):
        config = CubeConfig(dataset_name='S2L2A',
                            geometry=(10.11, 54.17, 10.14, 54.19),
                            spatial_res=0.00001,
                            time_range=('2019-01-01', '2019-01-02'))
        self.assertEqual(True, config.is_wgs84_crs)

        config = CubeConfig(dataset_name='S2L2A',
                            geometry=(10.11, 54.17, 10.14, 54.19),
                            spatial_res=0.00001,
                            crs='http://www.opengis.net/def/crs/OGC/1.3/CRS84',
                            time_range=('2019-01-01', '2019-01-02'))
        self.assertEqual(True, config.is_wgs84_crs)

        config = CubeConfig(dataset_name='S2L2A',
                            geometry=(10.11, 54.17, 10.14, 54.19),
                            spatial_res=0.00001,
                            crs='http://www.opengis.net/def/crs/EPSG/0/4326',
                            time_range=('2019-01-01', '2019-01-02'))
        self.assertEqual(True, config.is_wgs84_crs)

        config = CubeConfig(dataset_name='S2L2A',
                            geometry=(10.11, 54.17, 10.14, 54.19),
                            spatial_res=0.00001,
                            crs='http://www.opengis.net/def/crs/EPSG/0/3035',
                            time_range=('2019-01-01', '2019-01-02'))
        self.assertEqual(False, config.is_wgs84_crs)
Example #2
    def test_adjust_sizes(self):
        spatial_res = 0.00018
        common_kwargs = dict(dataset_name='S2L2A',
                             band_names=('B01', 'B02', 'B03'),
                             spatial_res=spatial_res,
                             tile_size=(512, 512),
                             time_range=('2019-01-01', '2019-01-02'))

        # size will be smaller than chunk sizes
        config = CubeConfig(geometry=(10.11, 54.17, 10.14, 54.19),
                            **common_kwargs)
        w, h = config.size
        x1, y1, x2, y2 = config.geometry
        self.assertEqual((167, 111), (w, h))
        self.assertEqual((167, 111), config.tile_size)
        self.assertEqual((1, 1), config.num_tiles)
        self.assertAlmostEqual(10.11, x1)
        self.assertAlmostEqual(10.14006, x2, places=4)
        self.assertAlmostEqual(54.17, y1)
        self.assertAlmostEqual(54.18998, y2, places=4)
        self.assertEqual(w, round((x2 - x1) / spatial_res))
        self.assertEqual(h, round((y2 - y1) / spatial_res))

        # size will be smaller than 2x chunk sizes
        config = CubeConfig(geometry=(10.11, 54.17, 10.2025, 54.3),
                            **common_kwargs)
        w, h = config.size
        x1, y1, x2, y2 = config.geometry
        self.assertEqual((514, 722), (w, h))
        self.assertEqual((514, 722), config.tile_size)
        self.assertEqual((1, 1), config.num_tiles)
        self.assertAlmostEqual(10.11, x1)
        self.assertAlmostEqual(10.20252, x2, places=4)
        self.assertAlmostEqual(54.17, y1)
        self.assertAlmostEqual(54.29996, y2, places=4)
        self.assertEqual(w, round((x2 - x1) / spatial_res))
        self.assertEqual(h, round((y2 - y1) / spatial_res))

        # size will be larger than or equal 2x chunk sizes
        config = CubeConfig(geometry=(10.11, 54.17, 10.5, 54.5),
                            **common_kwargs)
        w, h = config.size
        x1, y1, x2, y2 = config.geometry
        self.assertEqual((2560, 2048), (w, h))
        self.assertEqual((512, 512), config.tile_size)
        self.assertEqual((5, 4), config.num_tiles)
        self.assertAlmostEqual(10.11, x1)
        self.assertAlmostEqual(10.57080, x2, places=4)
        self.assertAlmostEqual(54.17, y1)
        self.assertAlmostEqual(54.53864, y2, places=4)
        self.assertEqual(w, round((x2 - x1) / spatial_res))
        self.assertEqual(h, round((y2 - y1) / spatial_res))
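# A standalone sketch of the adjustment arithmetic the assertions above encode
# (illustrative arithmetic only, not the CubeConfig implementation): the raw pixel
# counts are rounded, a dimension that reaches 2x the tile size is grown to a whole
# multiple of the tile size, and the bounding box is snapped to the adjusted size.
import math

spatial_res, tile = 0.00018, 512
x1, y1, x2, y2 = 10.11, 54.17, 10.5, 54.5
w = round((x2 - x1) / spatial_res)                   # 2167 raw pixels
h = round((y2 - y1) / spatial_res)                   # 1833 raw pixels
if w >= 2 * tile:
    w = tile * math.ceil(w / tile)                   # 2560 == 5 * 512
if h >= 2 * tile:
    h = tile * math.ceil(h / tile)                   # 2048 == 4 * 512
x2, y2 = x1 + w * spatial_res, y1 + h * spatial_res  # (10.5708, 54.53864)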
Example #3
    def test_from_and_to_dict(self):
        config = CubeConfig.from_dict(
            dict(dataset_name='S2L2A',
                 band_names=('B01', 'B02', 'B03'),
                 geometry=(10.11, 54.17, 10.14, 54.19),
                 spatial_res=0.00001,
                 tile_size=(512, 512),
                 time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(
            {
                'band_names': ('B01', 'B02', 'B03'),
                'band_sample_types': None,
                'band_units': None,
                'collection_id': None,
                'crs': 'http://www.opengis.net/def/crs/EPSG/0/4326',
                'dataset_name': 'S2L2A',
                'four_d': False,
                'geometry': (10.11, 54.17, 10.14072, 54.19048),
                'spatial_res': 1e-05,
                'tile_size': (512, 512),
                'time_period': None,
                'time_range': ('2019-01-01T00:00:00+00:00',
                               '2019-01-02T00:00:00+00:00'),
                'time_tolerance': '0 days 00:10:00'
            }, config.as_dict())
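# The test above only checks as_dict(); since the expected dict uses the same
# parameter names that from_dict() accepts, a round trip should also work.
# A minimal sketch (assuming from_dict() accepts the output of as_dict() unchanged):
config = CubeConfig.from_dict(
    dict(dataset_name='S2L2A',
         band_names=('B01', 'B02', 'B03'),
         geometry=(10.11, 54.17, 10.14, 54.19),
         spatial_res=0.00001,
         time_range=('2019-01-01', '2019-01-02')))
config2 = CubeConfig.from_dict(config.as_dict())
print(config2.as_dict() == config.as_dict())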
Example #4
    def get_cube_config(self):
        return CubeConfig(dataset_name='S2L2A',
                          bbox=(10.2, 53.5, 10.3, 53.6),
                          spatial_res=0.1 / 4000,
                          time_range=('2017-08-01', '2017-08-31'),
                          time_period=None,
                          four_d=False)

    def test_band_names_may_be_null(self):
        config = CubeConfig(dataset_name='S2L2A',
                            band_names=None,
                            bbox=(10.11, 54.17, 10.14, 54.19),
                            spatial_res=0.00001,
                            time_range=('2019-01-01', '2019-01-02'))
        self.assertEqual(None, config.band_names)
Example #6
    def get_cube_config(self):
        return CubeConfig(dataset_name='S2L1C',
                          band_names=['B01', 'B08', 'B12'],
                          bbox=(10.2, 53.5, 10.3, 53.6),
                          spatial_res=0.1 / 4000,
                          time_range=('2017-08-01', '2017-08-31'),
                          time_period='1D',
                          four_d=False)
    def test_time_deltas(self):
        config = CubeConfig.from_dict(
            dict(dataset_name='S2L2A',
                 band_names=('B01', 'B02', 'B03'),
                 bbox=(10.11, 54.17, 10.14, 54.19),
                 spatial_res=0.00001,
                 time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(None, config.time_period)
        self.assertEqual(pd.Timedelta('0 days 00:10:00'),
                         config.time_tolerance)

        config = CubeConfig.from_dict(
            dict(dataset_name='S2L2A',
                 band_names=('B01', 'B02', 'B03'),
                 bbox=(10.11, 54.17, 10.14, 54.19),
                 spatial_res=0.00001,
                 time_period='8D',
                 time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(pd.Timedelta('8 days 00:00:00'), config.time_period)
        self.assertEqual(None, config.time_tolerance)

        config = CubeConfig.from_dict(
            dict(dataset_name='S2L2A',
                 band_names=('B01', 'B02', 'B03'),
                 bbox=(10.11, 54.17, 10.14, 54.19),
                 spatial_res=0.00001,
                 time_tolerance='1H',
                 time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(None, config.time_period)
        self.assertEqual(pd.Timedelta('0 days 01:00:00'),
                         config.time_tolerance)

        config = CubeConfig.from_dict(
            dict(dataset_name='S2L2A',
                 band_names=('B01', 'B02', 'B03'),
                 bbox=(10.11, 54.17, 10.14, 54.19),
                 spatial_res=0.00001,
                 time_period='8D',
                 time_tolerance='1H',
                 time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(pd.Timedelta('8 days 00:00:00'), config.time_period)
        self.assertEqual(pd.Timedelta('0 days 01:00:00'),
                         config.time_tolerance)
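# The four cases above imply a simple default rule: time_tolerance falls back to
# ten minutes only when neither time_period nor time_tolerance is given; otherwise
# only what was set explicitly is used. A sketch of that rule as inferred from the
# assertions (an assumption, not taken from the CubeConfig source):
import pandas as pd

def resolve_time_params(time_period=None, time_tolerance=None):
    if time_period is None and time_tolerance is None:
        time_tolerance = pd.Timedelta(minutes=10)  # assumed default tolerance
    time_period = pd.to_timedelta(time_period) if time_period is not None else None
    time_tolerance = pd.to_timedelta(time_tolerance) if time_tolerance is not None else None
    return time_period, time_tolerance

print(resolve_time_params())                  # (None, Timedelta('0 days 00:10:00'))
print(resolve_time_params(time_period='8D'))  # (Timedelta('8 days 00:00:00'), None)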
Example #8
def gen(dataset, output_path, band_names, tile_size, geometry, spatial_res,
        crs, time_range, time_period, time_tolerance, four_d, verbose):
    """
    Generate a data cube from SentinelHub.

    By default, the command will create a ZARR dataset with 3D arrays
    for each band e.g. "B01", "B02" with dimensions "time", "lat", "lon".
    Use option "--4d" to write a single 4D array "band_data"
    with dimensions "time", "lat", "lon", "band".
    """
    import os.path
    import time
    import xarray as xr
    from xcube_sh.config import CubeConfig
    from xcube_sh.observers import Observers
    from xcube_sh.sentinelhub import SentinelHub
    from xcube_sh.store import SentinelHubStore

    if os.path.exists(output_path):
        raise click.ClickException(
            f'Output {output_path} already exists. Move it away first.')

    cube_config = CubeConfig(dataset_name=dataset,
                             band_names=band_names,
                             tile_size=tile_size,
                             geometry=geometry,
                             spatial_res=spatial_res,
                             crs=crs,
                             time_range=time_range,
                             time_period=time_period,
                             time_tolerance=time_tolerance,
                             four_d=four_d,
                             exception_type=click.ClickException)

    sentinel_hub = SentinelHub()

    print(f'Writing cube to {output_path}...')

    t0 = time.perf_counter()
    store = SentinelHubStore(sentinel_hub, cube_config)
    request_collector = Observers.request_collector()
    store.add_observer(request_collector)
    if verbose:
        store.add_observer(Observers.request_dumper())
    cube = xr.open_zarr(store)
    cube.to_zarr(output_path)
    duration = time.perf_counter() - t0

    print(f"Cube written to {output_path}, took {'%.2f' % duration} seconds.")

    if verbose:
        request_collector.stats.dump()
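# The docstring above describes two output layouts. A toy sketch of the structural
# difference using random data (illustration only, not actual SentinelHub output):
import numpy as np
import xarray as xr

n_time, n_lat, n_lon, bands = 3, 4, 5, ['B01', 'B02']

# default: one 3D variable per band, dimensions ("time", "lat", "lon")
cube_3d = xr.Dataset(
    {b: (('time', 'lat', 'lon'), np.random.rand(n_time, n_lat, n_lon))
     for b in bands})

# with "--4d": a single 4D variable "band_data" with an extra "band" dimension
cube_4d = xr.Dataset(
    {'band_data': (('time', 'lat', 'lon', 'band'),
                   np.random.rand(n_time, n_lat, n_lon, len(bands)))},
    coords={'band': bands})

print(list(cube_3d.data_vars))  # ['B01', 'B02']
print(cube_4d.band_data.dims)   # ('time', 'lat', 'lon', 'band')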
Example #9
    def test_from_dict_invalids(self):
        with self.assertRaises(ValueError) as cm:
            CubeConfig.from_dict(
                dict(dataset_name='S2L2A',
                     band_names=('B01', 'B02', 'B03'),
                     geometry=(10.11, 54.17, 10.14, 54.19),
                     special_res=0.00001,
                     tile_size=(512, 512),
                     time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(
            "Found invalid parameter 'special_res' in cube configuration",
            f'{cm.exception}')

        with self.assertRaises(ValueError) as cm:
            CubeConfig.from_dict(
                dict(dataset_name='S2L2A',
                     band_names=('B01', 'B02', 'B03'),
                     geometrix=(10.11, 54.17, 10.14, 54.19),
                     special_res=0.00001,
                     tile_size=(512, 512),
                     time_range=('2019-01-01', '2019-01-02')))
        self.assertEqual(
            "Found invalid parameters in cube configuration: 'geometrix', 'special_res'",
            f'{cm.exception}')
Example #10
    "2020-05-07"
]

dates = dates_ruhr
aoi = aoi_ruhr

IPython.display.GeoJSON(shapely.geometry.box(*aoi).__geo_interface__)

## Do Calculation for one time stamp

# Get Sentinel-2 L2A data

cube_con = CubeConfig(dataset_name=dataset,
                      band_names=band_names,
                      tile_size=[512, 512],
                      geometry=aoi,
                      spatial_res=spatial_res,
                      time_range=[dates[0], dates[1]],
                      time_period=time_period)
cube = open_cube(cube_con)
scl = MaskSet(cube.SCL)
cube = cube.where((scl.clouds_high_probability) == 0)

# date = dates[-1]  # do example calculation for the last timestamp of dates
date = dates[0]
#timestamp1 = cube.sel(time = cube.time[-1])
timestamp1 = cube.sel(time=cube.time[0])
timestamp1.B02.plot.imshow(vmin=0, vmax=0.2, figsize=[16, 8])

# Compute a roads mask using band ratios and thresholds
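# The mask computation itself is not part of the snippet above. A purely
# hypothetical sketch of a band-ratio-plus-threshold mask (the bands, the ratio
# and the threshold values are illustrative assumptions, not the notebook's
# actual method; it assumes e.g. B04 is among band_names):
ratio = timestamp1.B04 / (timestamp1.B02 + 1e-6)      # assumed band ratio
roads_mask = (ratio > 1.0) & (timestamp1.B02 < 0.15)  # assumed thresholds
roads_mask.astype('float32').plot.imshow(figsize=[16, 8])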
Example #11
    def open_data(self, data_id: str, **open_params) -> xr.Dataset:
        """
        Opens the dataset with given *data_id* and *open_params*.

        Possible values for *data_id* can be retrieved from the :meth:`SentinelHubDataStore.get_data_ids` method.
        Possible keyword-arguments in *open_params* are:

        * ``variable_names: Sequence[str]`` - optional list of variable names.
            If not given, all variables are included.
        * ``variable_units: Union[str, Sequence[str]]`` - units for all or each variable
        * ``variable_sample_types: Union[str, Sequence[str]]`` - sample types for all or each variable
        * ``crs: str`` - spatial CRS identifier. Must be a valid OGC CRS URI.
        * ``tile_size: Tuple[int, int]`` - optional tuple of spatial tile sizes in pixels.
        * ``bbox: Tuple[float, float, float, float]`` - spatial coverage given as (minx, miny, maxx, maxy)
            in units of the CRS. Required parameter.
        * ``spatial_res: float`` - spatial resolution in units of the CRS.
            Required parameter.
        * ``time_range: Tuple[Optional[str], Optional[str]]`` - tuple (start-time, end-time).
            Both start-time and end-time, if given, should use ISO 8601 format.
            Required parameter.
        * ``time_period: str`` - Pandas-compatible time period/frequency, e.g. "4D", "2W"
        * ``time_tolerance: str`` - Maximum time tolerance. Pandas-compatible time period/frequency.
        * ``collection_id: str`` - An identifier used by Sentinel Hub to identify BYOC datasets.
        * ``four_d: bool`` - If True, bands are written into a single 4D variable with an extra "band" dimension.

        In addition, all store parameters can be used if the data opener is used on its own.
        See the :meth:`SentinelHubDataStore.get_data_store_params_schema` method.

        :param data_id: The data identifier.
        :param open_params: Open parameters.
        :return: An xarray.Dataset instance
        """
        assert_not_none(data_id, 'data_id')

        schema = self.get_open_data_params_schema(data_id)
        schema.validate_instance(open_params)

        sentinel_hub = self._sentinel_hub
        if sentinel_hub is None:
            sh_kwargs, open_params = schema.process_kwargs_subset(
                open_params, (
                    'client_id',
                    'client_secret',
                    'api_url',
                    'oauth2_url',
                    'enable_warnings',
                    'error_policy',
                    'num_retries',
                    'retry_backoff_max',
                    'retry_backoff_base',
                ))
            sentinel_hub = SentinelHub(**sh_kwargs)

        cube_config_kwargs, open_params = schema.process_kwargs_subset(
            open_params, (
                'variable_names',
                'variable_units',
                'variable_sample_types',
                'crs',
                'tile_size',
                'bbox',
                'spatial_res',
                'time_range',
                'time_period',
                'time_tolerance',
                'collection_id',
                'four_d',
            ))

        chunk_store_kwargs, open_params = schema.process_kwargs_subset(
            open_params, ('observer', 'trace_store_calls'))

        band_names = cube_config_kwargs.pop('variable_names', None)
        band_units = cube_config_kwargs.pop('variable_units', None)
        band_sample_types = cube_config_kwargs.pop('variable_sample_types',
                                                   None)
        cube_config = CubeConfig(dataset_name=data_id,
                                 band_names=band_names,
                                 band_units=band_units,
                                 band_sample_types=band_sample_types,
                                 **cube_config_kwargs)
        chunk_store = SentinelHubChunkStore(sentinel_hub, cube_config,
                                            **chunk_store_kwargs)
        max_cache_size = open_params.pop('max_cache_size', None)
        if max_cache_size:
            chunk_store = zarr.LRUStoreCache(chunk_store,
                                             max_size=max_cache_size)
        return xr.open_zarr(chunk_store, **open_params)
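# A minimal usage sketch for the opener above. The keyword arguments follow the
# docstring; the `opener` instance and the concrete values are assumptions:
dataset = opener.open_data(
    'S2L2A',
    variable_names=['B01', 'B02', 'B03'],
    bbox=(10.11, 54.17, 10.14, 54.19),
    spatial_res=0.00018,
    time_range=('2019-01-01', '2019-01-02'),
    time_period='1D')
print(dataset.data_vars)  # the requested variables as an xarray.Dataset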
Example #12
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import unittest

import xarray as xr

from test.test_sentinelhub import HAS_SH_CREDENTIALS
from test.test_sentinelhub import REQUIRE_SH_CREDENTIALS
from xcube_sh.sentinelhub import SentinelHub
from xcube_sh.config import CubeConfig
from xcube_sh.cube import open_cube

cube_config = CubeConfig(dataset_name='S2L1C',
                         band_names=['B04'],
                         bbox=(10.00, 54.27, 11.00, 54.60),
                         spatial_res=0.00018,
                         time_range=('2018-05-14', '2018-07-31'),
                         time_tolerance='30M')


@unittest.skipUnless(HAS_SH_CREDENTIALS, REQUIRE_SH_CREDENTIALS)
class CubeTest(unittest.TestCase):
    def test_open_cube(self):
        cube = open_cube(cube_config=cube_config)
        self.assertIsInstance(cube, xr.Dataset)


class CubeTest2(unittest.TestCase):
    def test_open_cube_with_illegal_kwargs(self):
        with self.assertRaises(ValueError) as cm:
            open_cube(
Example #13
def gen(dataset, output_path, cube_config_path, source_config_path,
        dest_config_path, band_names, tile_size, geometry, spatial_res, crs,
        time_range, time_period, time_tolerance, four_d, verbose):
    """
    Generate a data cube from SentinelHub.

    By default, the command will create a ZARR dataset with 3D arrays
    for each band e.g. "B01", "B02" with dimensions "time", "lat", "lon".
    Use option "--4d" to write a single 4D array "band_data"
    with dimensions "time", "lat", "lon", "band".
    """
    import os.path
    import time
    import xarray as xr
    from xcube_sh.config import CubeConfig
    from xcube_sh.observers import Observers
    from xcube_sh.sentinelhub import SentinelHub
    from xcube_sh.store import SentinelHubStore

    if os.path.exists(output_path):
        raise click.ClickException(
            f'Output {output_path} already exists. Move it away first.')

    cube_config_dict = _load_config_dict(cube_config_path)
    source_config_dict = _load_config_dict(source_config_path)
    dest_config_dict = _load_config_dict(dest_config_path)

    cube_config_dict.update({
        k: v
        for k, v in dict(dataset_name=dataset,
                         band_names=band_names,
                         tile_size=tile_size,
                         geometry=geometry,
                         spatial_res=spatial_res,
                         crs=crs,
                         time_range=time_range,
                         time_period=time_period,
                         time_tolerance=time_tolerance,
                         four_d=four_d).items() if v is not None
    })

    cube_config = CubeConfig.from_dict(cube_config_dict,
                                       exception_type=click.ClickException)

    # TODO: validate source_config_dict
    sentinel_hub = SentinelHub(**source_config_dict)

    print(f'Writing cube to {output_path}...')

    # TODO: validate dest_config_dict
    # TODO: use dest_config_dict and output_path to determine actual output, which may be AWS S3
    t0 = time.perf_counter()
    store = SentinelHubStore(sentinel_hub, cube_config)
    request_collector = Observers.request_collector()
    store.add_observer(request_collector)
    if verbose:
        store.add_observer(Observers.request_dumper())
    cube = xr.open_zarr(store)
    cube.to_zarr(output_path, **dest_config_dict)
    duration = time.perf_counter() - t0

    print(f"Cube written to {output_path}, took {'%.2f' % duration} seconds.")

    if verbose:
        request_collector.stats.dump()
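# The cube_config_dict.update(...) call above merges command-line values over the
# configuration file, keeping the file value wherever an option was not given.
# A standalone sketch of that pattern with toy dicts (names are illustrative):
file_config = {'dataset_name': 'S2L2A', 'spatial_res': 0.00018, 'time_period': '1D'}
cli_options = {'dataset_name': None, 'spatial_res': 0.0009, 'time_period': None}
file_config.update({k: v for k, v in cli_options.items() if v is not None})
print(file_config)  # {'dataset_name': 'S2L2A', 'spatial_res': 0.0009, 'time_period': '1D'}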
Example #14
place = area
for i in range(2):
    if not date2020:
        # 2019
        data = "2019"
        date_x = dates[0]
        date_y = dates[1]
    else:
        data = "2020"
        date_x = dates[2]
        date_y = dates[3]
    IPython.display.GeoJSON(shapely.geometry.box(*aoi).__geo_interface__)
    cube_con = CubeConfig(
        dataset_name=dataset,
        band_names=band_names,
        tile_size=[512, 512],
        geometry=aoi,  # area of interest
        spatial_res=spatial_res,  #0.00009
        time_range=[date_x, date_y],
        time_period=time_period)  # what is time tolerance

    cube = open_cube(cube_con, **hc)
    scl = MaskSet(cube.SCL)

    #cube = cube.where((scl.clouds_high_probability) == 0) #shows weird errors

    cube = cube.where(
        (scl.clouds_high_probability + scl.clouds_medium_probability +
         scl.clouds_low_probability_or_unclassified + scl.cirrus) == 0)
    date = dates[0]
    t1 = cube.sel(time=cube.time[0])
    B02 = t1.B02
Example #15
dates = ["2020-01-01", "2020-05-10"]

aoi = aoi_ruhr

IPython.display.GeoJSON(shapely.geometry.box(*aoi).__geo_interface__)

## Do Calculation for one time stamp

pixels = []

# Get Sentinel-2 L2A data

cube_con = CubeConfig(dataset_name=dataset,
                      band_names=band_names,
                      tile_size=[512, 512],
                      geometry=aoi,
                      spatial_res=spatial_res,
                      time_range=[dates[0], dates[1]],
                      time_period=time_period)
cube = open_cube(cube_con)

# Mask out clouds
scl = MaskSet(cube.SCL)
cube = cube.where(
    (scl.clouds_high_probability + scl.clouds_medium_probability +
     scl.clouds_low_probability_or_unclassified + scl.cirrus) == 0)

dates = cube.time.values

for date in dates:
Example #16
    def test_deprecated_geometry_still_works(self):
        config = CubeConfig(dataset_name='S2L2A',
                            geometry=(10.11, 54.17, 10.14, 54.19),
                            spatial_res=0.00001,
                            time_range=('2019-01-01', '2019-01-02'))
        self.assertEqual(config.geometry, config.bbox)
Example #17
def gen(request: Optional[str],
        dataset_name: Optional[str],
        band_names: Optional[Tuple],
        tile_size: Optional[str],
        geometry: Optional[str],
        spatial_res: Optional[float],
        crs: Optional[str],
        time_range: Optional[str],
        time_period: Optional[str],
        time_tolerance: Optional[str],
        output_path: Optional[str],
        four_d: bool,
        verbose: bool):
    """
    Generate a data cube from SENTINEL Hub.

    By default, the command will create a Zarr dataset with 3D arrays
    for each band e.g. "B01", "B02" with dimensions "time", "lat", "lon".
    Use option "--4d" to write a single 4D array "band_data"
    with dimensions "time", "lat", "lon", "band".

    Please use command "xcube sh req" to generate example request files that can be passed as REQUEST.
    REQUEST may have JSON or YAML format.
    You can also pipe a JSON request into this command. In this case, the request is read from standard input and no REQUEST argument is needed.
    """
    import json
    import os.path
    import sys
    import xarray as xr
    from xcube.core.dsio import write_dataset
    from xcube.util.perf import measure_time
    from xcube_sh.config import CubeConfig
    from xcube_sh.observers import Observers
    from xcube_sh.sentinelhub import SentinelHub
    from xcube_sh.chunkstore import SentinelHubChunkStore

    if request:
        request_dict = _load_request(request)
    elif not sys.stdin.isatty():
        request_dict = json.load(sys.stdin)
    else:
        request_dict = {}

    cube_config_dict = request_dict.get('cube_config', {})
    _overwrite_config_params(cube_config_dict,
                             dataset_name=dataset_name,
                             band_names=band_names if band_names else None,  # because of multiple=True
                             tile_size=tile_size,
                             geometry=geometry,
                             spatial_res=spatial_res,
                             crs=crs,
                             time_range=time_range,
                             time_period=time_period,
                             time_tolerance=time_tolerance,
                             four_d=four_d)

    input_config_dict = request_dict.get('input_config', {})
    if 'datastore_id' in input_config_dict:
        input_config_dict = dict(input_config_dict)
        datastore_id = input_config_dict.pop('datastore_id')
        if datastore_id != 'sentinelhub':
            warnings.warn(f'Unknown datastore_id={datastore_id!r} encountered in request. Ignoring it...')
    # _overwrite_config_params(input_config_dict, ...)
    # TODO: validate input_config_dict

    output_config_dict = request_dict.get('output_config', {})
    _overwrite_config_params(output_config_dict,
                             path=output_path)
    # TODO: validate output_config_dict

    cube_config = CubeConfig.from_dict(cube_config_dict,
                                       exception_type=click.ClickException)

    if 'path' in output_config_dict:
        output_path = output_config_dict.pop('path')
    else:
        output_path = DEFAULT_GEN_OUTPUT_PATH
    if not _is_bucket_url(output_path) and os.path.exists(output_path):
        raise click.ClickException(f'Output {output_path} already exists. Move it away first.')

    sentinel_hub = SentinelHub(**input_config_dict)

    print(f'Writing cube to {output_path}...')

    with measure_time() as cm:
        store = SentinelHubChunkStore(sentinel_hub, cube_config)
        request_collector = Observers.request_collector()
        store.add_observer(request_collector)
        if verbose:
            store.add_observer(Observers.request_dumper())
        cube = xr.open_zarr(store)
        if _is_bucket_url(output_path):
            client_kwargs = {k: output_config_dict.pop(k)
                             for k in ('provider_access_key_id', 'provider_secret_access_key')
                             if k in output_config_dict}
            write_dataset(cube, output_path, format_name='zarr', client_kwargs=client_kwargs, **output_config_dict)
        else:
            write_dataset(cube, output_path, **output_config_dict)

    print(f"Cube written to {output_path}, took {'%.2f' % cm.duration} seconds.")

    if verbose:
        request_collector.stats.dump()
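# The command above reads three top-level sections from REQUEST (or from stdin):
# "cube_config", "input_config" and "output_config". A sketch of a matching
# request as a Python dict; the section names and the "datastore_id" and "path"
# keys follow the code above, the remaining values are illustrative:
example_request = {
    'cube_config': {
        'dataset_name': 'S2L2A',
        'band_names': ['B01', 'B02', 'B03'],
        'geometry': (10.11, 54.17, 10.14, 54.19),
        'spatial_res': 0.00018,
        'time_range': ('2019-01-01', '2019-01-02'),
    },
    'input_config': {
        'datastore_id': 'sentinelhub',
    },
    'output_config': {
        'path': 'out.zarr',
    },
}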