Example #1
def download():
    """Download data files from ESA and store them in the configured storage
    directory.
    """

    # Load download configuration
    storage = config('storage') or 'data'
    date_begin = config('download', 'date', 'begin')\
        or '2019-09-10T00:00:00.000Z'
    date_end = config('download', 'date', 'end') or '2019-09-11T00:00:00.000Z'
    countries = config('download', 'country') or ['DE']

    # Create the storage folder if it does not exist
    os.makedirs(storage, exist_ok=True)

    for country in countries:
        wkt = bounding_box_to_wkt(*country_bounding_boxes[country][1])

        # Search for data matching the parameters
        logger.info(f'Looking for products in country {country}')
        result = sentinel5dl.search(wkt,
                                    begin_ts=date_begin,
                                    end_ts=date_end,
                                    product=product,
                                    processing_level=processing_level,
                                    logger_fn=logger.info)
        logger.info('Found {0} products'.format(len(result.get('products'))))

        # Download data
        sentinel5dl.download(result.get('products'),
                             output_dir=storage,
                             logger_fn=logger.info)
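
Example #1 depends on a few project helpers that are not shown on this page. A minimal sketch of what they might look like, assuming config() walks nested keys of a parsed configuration file and country_bounding_boxes maps ISO country codes to named bounding boxes (all names and values below are illustrative, not the project's actual code):

# Hypothetical helpers assumed by Example #1; not part of sentinel5dl itself.

_CONFIG = {}  # assumed to be populated from a configuration file at startup


def config(*keys):
    '''Walk nested configuration keys, returning None if any key is missing.'''
    value = _CONFIG
    for key in keys:
        if not isinstance(value, dict) or key not in value:
            return None
        value = value[key]
    return value


# Maps ISO country codes to (name, (lon_min, lat_min, lon_max, lat_max)).
country_bounding_boxes = {
    'DE': ('Germany', (5.99, 47.30, 15.02, 54.98)),
}


def bounding_box_to_wkt(lon1, lat1, lon2, lat2):
    '''Convert a bounding box to a closed WKT polygon string.'''
    return (f'POLYGON(({lon1} {lat1},{lon1} {lat2},{lon2} {lat2},'
            f'{lon2} {lat1},{lon1} {lat1}))')
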
Example #2
    def save(self):
        if self.assert_readiness():
            base_save_dir = 'cache/sentinel/'
            rfiles = []
            for product in self.products:
                result = sentinel5dl.search(
                    polygon=self.polygon,
                    begin_ts=self.begin_datetime,
                    end_ts=self.end_datetime,
                    product=product,
                    processing_level=self.processing_level)

                save_dir = base_save_dir + f"{product.strip('_')}_{self.begin_datetime[:10]}_{self.end_datetime[:10]}"

                if not os.path.exists(save_dir):
                    os.makedirs(save_dir)
                else:
                    for file in glob.glob(save_dir + '/*'):
                        os.remove(file)

                sentinel5dl.download(result.get('products'),
                                     output_dir=save_dir)

                rfiles.extend(glob.glob(save_dir + '/*'))

            return rfiles
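
The save() method in Example #2 belongs to a class that is not shown on this page. A minimal hypothetical wrapper, assuming the attributes it reads are plain constructor arguments:

class Sentinel5Request:
    '''Hypothetical container for the attributes used by save() above.'''

    def __init__(self, polygon, begin_datetime, end_datetime,
                 products, processing_level='L2'):
        self.polygon = polygon                # WKT polygon string
        self.begin_datetime = begin_datetime  # e.g. '2019-09-01T00:00:00.000Z'
        self.end_datetime = end_datetime      # e.g. '2019-09-17T23:59:59.999Z'
        self.products = products              # e.g. ['L2__CO____']
        self.processing_level = processing_level

    def assert_readiness(self):
        '''Only proceed when every required attribute is set.'''
        return all([self.polygon, self.begin_datetime,
                    self.end_datetime, self.products])
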
Example #3
def main():
    '''Entrypoint for running this as a module or from the binary.
    Triggers the autoupdater.
    The autoupdater will download all missing product files for the
    configured products and time intervals and add them to the database.
    '''

    parser = argparse.ArgumentParser(
        description='Automagically update the database and '
        'download and add all missing data to it.')
    parser.add_argument(
        '--ignore-existing',
        action='store_true',
        help='Ignore the already downloaded intervals. '
        'This is useful to fill the gaps between already downloaded data.')
    args = parser.parse_args()

    def init_worker():
        '''Clear the session maker in each fork, since libpq connections
        must not be shared across processes.'''
        db.__session__ = None

    # Generate temporary directory to work in.
    with tempfile.TemporaryDirectory() as tmp_dir:
        # Iterate through products and import data not already present.
        for name, product in db.products.items():
            logger.info('Updating product %s', name)
            # Iterate through time intervals to update
            for begin_ts, end_ts in get_intervals_to_process(
                    product['table'], not args.ignore_existing):
                logger.info(
                    'Searching for downloadable product files'
                    ' between %s and %s', begin_ts, end_ts)

                # Search for product files from the ESA.
                result = sentinel5dl.search(
                    begin_ts=begin_ts,
                    end_ts=end_ts,
                    processing_mode='Offline',
                    processing_level='L2',
                    product=product['product_key'],
                )
                product_files = result.get('products', [])

                # Process product files
                logger.info(
                    'Processing found product files in parallel with %d workers',
                    workers)
                with multiprocessing.Pool(workers, init_worker) as p:
                    p.starmap(
                        single_file_update,
                        zip(product_files, itertools.repeat(tmp_dir),
                            itertools.repeat(product)))
            logger.info('Finished updating product %s', name)
    logger.info('Update complete')
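
The worker single_file_update and the helper get_intervals_to_process come from the surrounding project and are not shown on this page. A rough sketch of the worker's shape, assuming the db module exposes an import routine (db.import_file is an invented name used only for illustration):

def single_file_update(product_file, tmp_dir, product):
    '''Hypothetical worker: download one product file and import it.'''
    # Download this single product file into the temporary working directory.
    sentinel5dl.download([product_file], output_dir=tmp_dir)

    # Import the downloaded netCDF file into the table configured for this
    # product (db.import_file is an assumed, illustrative function).
    path = os.path.join(tmp_dir, product_file['identifier'] + '.nc')
    db.import_file(path, product['table'])

    # Remove the file again so the temporary directory stays small.
    os.remove(path)
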
Example #4
def main():
    result = search(
            polygon='POLYGON((7.88574278354645 49.347193400927495,'
                    '13.452152609825136 49.347193400927495,'
                    '13.452152609825136 52.870418902802214,'
                    '7.88574278354645 52.870418902802214,'
                    '7.88574278354645 49.347193400927495))',
            begin_ts='2019-09-01T00:00:00.000Z',
            end_ts='2019-09-17T23:59:59.999Z',
            product='L2__CO____',
            processing_level='L2',
            logger_fn=print)
    print('Found {0} products'.format(len(result.get('products'))))
    download(result.get('products'), logger_fn=print)
Example #5
def main():
    # Configure logging in the library
    logging.basicConfig()
    logger = logging.getLogger(sentinel5dl.__name__)
    logger.setLevel(logging.INFO)

    # Search for Sentinel-5 products
    result = search(polygon='POLYGON((7.88574278354645 49.347193400927495,'
                    '13.452152609825136 49.347193400927495,'
                    '13.452152609825136 52.870418902802214,'
                    '7.88574278354645 52.870418902802214,'
                    '7.88574278354645 49.347193400927495))',
                    begin_ts='2019-09-01T00:00:00.000Z',
                    end_ts='2019-09-17T23:59:59.999Z',
                    product='L2__CO____',
                    processing_level='L2')

    # Download found products to the local folder
    download(result.get('products'))
Example #6
    def test(self):
        '''Test search and download.
        '''
        result = sentinel5dl.search(
            polygon='POLYGON((7 49,13 49,13 52,7 52,7 49))',
            begin_ts=datetime.datetime.fromtimestamp(0),
            end_ts=datetime.datetime.now(),
            product='L2__CO____')

        # The result returned by the mock contains four products but claims a
        # total of eight products, making sentinel5dl request resources twice.
        self.assertEqual(self._count_search_request, 2)
        self.assertEqual(result['totalresults'], 8)
        self.assertEqual(result['totalresults'], len(result['products']))

        products = result['products']
        with tempfile.TemporaryDirectory() as tmpdir:

            # prepare a file which is half-downloaded
            file_one = os.path.join(tmpdir, products[0]['identifier'] + '.nc')
            with open(file_one, 'wb') as f:
                f.write(b'12')

            sentinel5dl.download(products, tmpdir)

            # test files
            for product in products:
                filename = os.path.join(tmpdir, product['identifier'] + '.nc')
                with open(filename, 'rb') as f:
                    self.assertEqual(f.read(), b'123')

            # We should have downloaded four files and have an additional four
            # files storing md5 checksums
            self.assertEqual(len(os.listdir(tmpdir)), 8)

        # We should have four checksum requests. One for each file
        self.assertEqual(self._count_checksum_request, 4)
        # We should have downloaded four unique files
        self.assertEqual(self._count_download, 4)
Example #7
def download():
    """Download data files from ESA and store them in the configured storage
    directory.
    """
    wkt = bounding_box_to_wkt(*country_bounding_boxes['DE'][1])

    # Create the storage folder if it does not exist
    os.makedirs(storage, exist_ok=True)

    # Search for data matching the parameters
    result = sentinel5dl.search(wkt,
                                begin_ts=start_date,
                                end_ts=end_date,
                                product=product,
                                processing_level=processing_level,
                                logger_fn=logger.info)
    logger.info('Found {0} products'.format(len(result.get('products'))))

    # Download data
    sentinel5dl.download(result.get('products'),
                         output_dir=storage,
                         logger_fn=logger.info)
Example #8
def download_sentinel5_offline(start_date: datetime,
                               length: timedelta,
                               _country_name: str = None,
                               _path: str = ".",
                               product: str = None) -> None:
    """
    Download satellite data for a given start date and length, optionally
    filtered by country name.
    :param start_date: datetime
    :param length: timedelta
    :param _country_name: str
    :param _path: str
    :param product: str name of the gas or spectral region
    :return: None
    """
    begin_date = '{}.000Z'.format(start_date.isoformat())
    end_date = '{}.999Z'.format((start_date + length).isoformat())

    world = geopandas.read_file(
        geopandas.datasets.get_path('naturalearth_lowres'))

    _level = None
    if product is not None:

        if product in [
                "IR_SIR", "IR_UVN", "RA_BD1", "RA_BD2", "RA_BD3", "RA_BD4",
                "RA_BD5", "RA_BD6", "RA_BD6", "RA_BD7", "RA_BD8"
        ]:
            product = "LIB_{}".format(product)
            _level = "L1B"

        if product in [
                "AER_AI", "AER_LH", "CH4", "CLOUD", "CO", "HCHO", "NP_BD3",
                "NO2", "NP_BD6", "NP_BD7", "O3_TCL", "O3", "SO2"
        ]:
            product = "L2__{}_____".format(product)[0:10]
            _level = "L2"

    if _country_name is not None:
        nation = world.query('name=="{}"'.format(_country_name))
        minx, miny, maxx, maxy = nation.geometry.total_bounds
        _country = "POLYGON(({0:.2f} {1:.2f},{0:.2f} {3:.2f},{2:.2f} {3:.2f},{2:.2f} {1:.2f},{0:.2f} {1:.2f}))".format(
            minx, miny, maxx, maxy)
    else:
        _country = None

    # Search for Sentinel-5 products
    result = sentinel5dl.search(polygon=_country,
                                begin_ts=begin_date,
                                end_ts=end_date,
                                product=product,
                                processing_level=_level,
                                processing_mode='Offline')

    if not any(
        [value is None for value in [_country_name, start_date, product]]):
        _path = os.path.join(_path, _country_name,
                             start_date.strftime("%Y-%m-%d"), product)

    if not os.path.exists(_path):
        os.makedirs(_path)

    # Download found products to the local folder
    sentinel5dl.download(products=result.get("products"), output_dir=_path)
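
A hedged usage sketch for Example #8, assuming the function is importable from the module that defines it (the module name used here is illustrative):

from datetime import datetime, timedelta

# Hypothetical import location; adjust to wherever the function is defined.
from satellite_downloader import download_sentinel5_offline

# Download one day of offline CO data over Germany. With these arguments the
# function stores the files under data/Germany/2019-09-01/L2__CO____.
download_sentinel5_offline(start_date=datetime(2019, 9, 1),
                           length=timedelta(days=1),
                           _country_name='Germany',
                           _path='data',
                           product='CO')
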
Example #9
def main():
    # Configure logging in the library
    logging.basicConfig()
    logger = logging.getLogger(sentinel5dl.__name__)
    logger.setLevel(logging.INFO)

    parser = argparse.ArgumentParser(
        description='Search for and download Sentinel-5P data files',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=f'AVAILABLE PRODUCTS\n{PRODUCTS_STR}'
    )

    # argparse's type= accepts a callable, which handles the parsing for
    # most of these arguments
    parser.add_argument(
        '--polygon',
        type=is_polygon,
        help='''Polygon defining an area by a set of coordinates.
            Example: 30.1 10.0, 40.0 40.1, 20 40, 10 20, 30.1 10.0'''
    )

    parser.add_argument(
        '--product',
        choices=PRODUCTS,
        metavar='PRODUCT',
        default='L2__CO____',
        help='Type of product to search for'
    )

    parser.add_argument(
        '--level',
        choices=PROCESSING_LEVELS,
        default='L2',
        help='Data processing level'
    )

    parser.add_argument(
        '--mode',
        choices=PROCESSING_MODES,
        help='Data processing mode'
    )

    parser.add_argument(
        '--begin-ts',
        default='2019-09-01T00:00:00.000Z',
        type=dateutil.parser.parse,
        help='''Timestamp specifying the earliest sensing date.
            Example: 2019-09-01T00:00:00.000Z'''
    )

    parser.add_argument(
        '--end-ts',
        default='2019-09-17T23:59:59.999Z',
        type=dateutil.parser.parse,
        help='''Timestamp specifying the latest sensing date.
            Example: 2019-09-17T23:59:59.999Z'''
    )

    parser.add_argument(
        '--use-certifi',
        action='store_true',
        help='''If a Certificate Authority (CA) bundle is not already supplied
            by your operating system, certifi provides an easy way of
            providing a CA bundle.'''
    )

    parser.add_argument(
        '--worker',
        type=int,
        default=1,
        help='Number of parallel downloads',
    )

    parser.add_argument(
        'download_dir',
        metavar='download-dir',
        help='Download directory'
    )

    args = parser.parse_args()

    # Provide a Certificate Authority (CA) bundle
    if args.use_certifi:
        sentinel5dl.ca_info = certifi.where()

    # Search for Sentinel-5 products
    result = search(
        polygon=args.polygon,
        begin_ts=args.begin_ts,
        end_ts=args.end_ts,
        product=args.product,
        processing_level=args.level,
        processing_mode=args.mode
    )

    # Download found products to the download directory using the
    # configured number of workers
    with multiprocessing.Pool(args.worker) as p:
        p.starmap(download, map(
            lambda product: ((product,), args.download_dir),
            result.get('products')))
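
The closing starmap call in Example #9 packs each product into a one-element tuple, so every worker invocation is equivalent to download((product,), args.download_dir): each process downloads a single-product list into the chosen directory. A sequential equivalent, for clarity:

for product in result.get('products'):
    download((product,), args.download_dir)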