def single_file_update(session, product_file, directory, product):
    '''Download a single file, add it to the database and delete the file
    afterwards.

    :param session: SQLAlchemy Session
    :type session: sqlalchemy.orm.session.Session
    :param product_file: Metadata of a single product file as received from
                         sentinal5dl.search.
    :type product_file: dict
    :param directory: Directory to download file to.
    :type directory: string
    :param product: Type of the processed product. An entry of
                    emissionsapi.db.products.
    :type product: dict
    '''
    filename = f'{product_file["identifier"]}.nc'

    # Skip files whose name is already recorded in the database.
    already_processed = session.query(db.File)\
        .filter(db.File.filename == filename).first()
    if already_processed:
        logger.warning('File %s already processed', filename)
        return

    # Fetch the file from the remote archive.
    logger.info('Downloading file %s', filename)
    sentinel5dl.download((product_file,), directory)

    # Ingest the downloaded file into the database.
    filepath = os.path.join(directory, filename)
    preprocess.preprocess_file(filepath, product['table'], product['product'])

    # Drop the local copy once the data has been imported.
    logger.info('Removing %s', filepath)
    os.remove(filepath)
def download():
    """Download data files from ESA and store them in the configured storage
    directory.

    Reads the storage directory, date range and country list from the
    application configuration (with defaults), searches for matching
    products per country and downloads them.
    """
    # Load download configuration, falling back to defaults.
    storage = config('storage') or 'data'
    date_begin = config('download', 'date', 'begin')\
        or '2019-09-10T00:00:00.000Z'
    date_end = config('download', 'date', 'end') or '2019-09-11T00:00:00.000Z'
    countries = config('download', 'country') or ['DE']

    # create storage folder if not existing
    os.makedirs(storage, exist_ok=True)

    for country in countries:
        wkt = bounding_box_to_wkt(*country_bounding_boxes[country][1])

        # Search for data matching the parameter.
        # Use lazy %-style logging arguments so the message is only
        # formatted when the record is actually emitted (and to be
        # consistent instead of mixing f-strings and str.format).
        logger.info('Looking for products in country %s', country)
        result = sentinel5dl.search(
            wkt,
            begin_ts=date_begin,
            end_ts=date_end,
            product=product,
            processing_level=processing_level,
            logger_fn=logger.info)
        logger.info('Found %d products', len(result.get('products')))

        # Download data
        sentinel5dl.download(
            result.get('products'),
            output_dir=storage,
            logger_fn=logger.info)
def save(self):
    """Search for all configured products and download the matching files
    into per-product cache directories below ``cache/sentinel/``.

    Any files left over from a previous run in a product's directory are
    removed before the new download starts.

    :return: List of paths of all downloaded files, or ``None`` if
             ``assert_readiness`` failed.
    """
    if self.assert_readiness():
        base_save_dir = 'cache/sentinel/'
        rfiles = []
        for product in self.products:
            result = sentinel5dl.search(
                polygon=self.polygon,
                begin_ts=self.begin_datetime,
                end_ts=self.end_datetime,
                product=product,
                processing_level=self.processing_level)
            save_dir = base_save_dir + f"{product.strip('_')}_{self.begin_datetime[:10]}_{self.end_datetime[:10]}"
            # Create the directory with exist_ok to avoid the race between
            # an exists() check and makedirs(), then clear any stale files.
            # Clearing a freshly created (empty) directory is a no-op, so
            # this is equivalent to the previous create-or-clean branches.
            os.makedirs(save_dir, exist_ok=True)
            for stale in glob.glob(save_dir + '/*'):
                os.remove(stale)
            sentinel5dl.download(result.get('products'),
                                 output_dir=save_dir)
            rfiles.extend(glob.glob(save_dir + '/*'))
        return rfiles
def main():
    # Area of interest: a rectangle roughly covering Germany.
    polygon = ('POLYGON((7.88574278354645 49.347193400927495,'
               '13.452152609825136 49.347193400927495,'
               '13.452152609825136 52.870418902802214,'
               '7.88574278354645 52.870418902802214,'
               '7.88574278354645 49.347193400927495))')

    # Search for carbon monoxide L2 products in the given time range.
    result = search(
        polygon=polygon,
        begin_ts='2019-09-01T00:00:00.000Z',
        end_ts='2019-09-17T23:59:59.999Z',
        product='L2__CO____',
        processing_level='L2',
        logger_fn=print)
    print('Found {0} products'.format(len(result.get('products'))))

    # Download everything that was found.
    download(result.get('products'), logger_fn=print)
def main():
    # Configure logging in the library so progress is visible.
    logging.basicConfig()
    lib_logger = logging.getLogger(sentinel5dl.__name__)
    lib_logger.setLevel(logging.INFO)

    # Area of interest: a rectangle roughly covering Germany.
    polygon = ('POLYGON((7.88574278354645 49.347193400927495,'
               '13.452152609825136 49.347193400927495,'
               '13.452152609825136 52.870418902802214,'
               '7.88574278354645 52.870418902802214,'
               '7.88574278354645 49.347193400927495))')

    # Search for Sentinel-5 carbon monoxide products.
    result = search(
        polygon=polygon,
        begin_ts='2019-09-01T00:00:00.000Z',
        end_ts='2019-09-17T23:59:59.999Z',
        product='L2__CO____',
        processing_level='L2')

    # Download found products to the local folder.
    download(result.get('products'))
def test(self):
    '''Test search and download.
    '''
    search_result = sentinel5dl.search(
        polygon='POLYGON((7 49,13 49,13 52,7 52,7 49))',
        begin_ts=datetime.datetime.fromtimestamp(0),
        end_ts=datetime.datetime.now(),
        product='L2__CO____')

    # The result returned by the mock contains four products but claims a
    # total of eight products, making sentinel5dl request resources twice.
    self.assertEqual(self._count_search_request, 2)
    self.assertEqual(search_result['totalresults'], 8)
    self.assertEqual(search_result['totalresults'],
                     len(search_result['products']))

    products = search_result['products']
    with tempfile.TemporaryDirectory() as tmpdir:
        # Prepare a file which is half-downloaded so that the download has
        # to complete it.
        partial_file = os.path.join(tmpdir, products[0]['identifier'] + '.nc')
        with open(partial_file, 'wb') as f:
            f.write(b'12')

        sentinel5dl.download(products, tmpdir)

        # Every product file must now contain the complete mock payload.
        for product in products:
            target = os.path.join(tmpdir, product['identifier'] + '.nc')
            with open(target, 'rb') as f:
                self.assertEqual(f.read(), b'123')

        # Four downloaded files plus four files storing md5 checksums.
        self.assertEqual(len(os.listdir(tmpdir)), 8)
        # One checksum request per file.
        self.assertEqual(self._count_checksum_request, 4)
        # Four unique files were actually downloaded.
        self.assertEqual(self._count_download, 4)
def download():
    """Download data files from ESA and store them in the configured storage
    directory.
    """
    wkt = bounding_box_to_wkt(*country_bounding_boxes['DE'][1])

    # Make sure the storage folder exists.
    os.makedirs(storage, exist_ok=True)

    # Query the archive for products matching the configured parameters.
    search_result = sentinel5dl.search(
        wkt,
        begin_ts=start_date,
        end_ts=end_date,
        product=product,
        processing_level=processing_level,
        logger_fn=logger.info)
    logger.info(
        'Found {0} products'.format(len(search_result.get('products'))))

    # Fetch every matching product into the storage folder.
    sentinel5dl.download(
        search_result.get('products'),
        output_dir=storage,
        logger_fn=logger.info)
def _normalize_product(product):
    """Map a short product name to its full ESA identifier and level.

    :param product: Short product name (e.g. ``"CO"`` or ``"RA_BD1"``),
        or None.
    :return: Tuple ``(full_product_id, processing_level)``. For unknown
        names the input is returned unchanged with level None; for None
        the result is ``(None, None)``.
    """
    if product is None:
        return None, None
    # Level-1B products. (The original list repeated "RA_BD6"; the
    # duplicate has been removed — membership semantics are unchanged.)
    if product in [
            "IR_SIR", "IR_UVN", "RA_BD1", "RA_BD2", "RA_BD3", "RA_BD4",
            "RA_BD5", "RA_BD6", "RA_BD7", "RA_BD8"]:
        return "LIB_{}".format(product), "L1B"
    # Level-2 products: pad with underscores and trim to the fixed
    # 10-character identifier width (e.g. "CO" -> "L2__CO____").
    if product in [
            "AER_AI", "AER_LH", "CH4", "CLOUD", "CO", "HCHO", "NP_BD3",
            "NO2", "NP_BD6", "NP_BD7", "O3_TCL", "O3", "SO2"]:
        return "L2__{}_____".format(product)[0:10], "L2"
    return product, None


def download_sentinel5_offline(start_date: datetime,
                               length: timedelta,
                               _country_name: str = None,
                               _path: str = ".",
                               product: str = None) -> None:
    """
    Download the satellite data of a given start date for a given length.
    Filtered by Country Name.

    :param start_date: datetime
    :param length: timedelta
    :param _country_name: str
    :param _path: str
    :param product: str name of the Gas, Spectral Region
    :return: None
    """
    begin_date = '{}.000Z'.format(start_date.isoformat())
    end_date = '{}.999Z'.format((start_date + length).isoformat())

    # Resolve the short product name to the full identifier and its level.
    product, _level = _normalize_product(product)

    if _country_name is not None:
        # Load the country outlines only when a country filter is actually
        # requested — the dataset read is comparatively expensive.
        world = geopandas.read_file(
            geopandas.datasets.get_path('naturalearth_lowres'))
        nation = world.query('name=="{}"'.format(_country_name))
        # Bounding-box polygon (WKT) around the country's geometry.
        minx, miny, maxx, maxy = nation.geometry.total_bounds
        _country = ("POLYGON(({0:.2f} {1:.2f},{0:.2f} {3:.2f},"
                    "{2:.2f} {3:.2f},{2:.2f} {1:.2f},"
                    "{0:.2f} {1:.2f}))").format(minx, miny, maxx, maxy)
    else:
        _country = None

    # Search for Sentinel-5 products
    result = sentinel5dl.search(polygon=_country,
                                begin_ts=begin_date,
                                end_ts=end_date,
                                product=product,
                                processing_level=_level,
                                processing_mode='Offline')

    # With a complete set of parameters, store the files in a
    # country/date/product subdirectory hierarchy.
    if all(value is not None
           for value in (_country_name, start_date, product)):
        _path = os.path.join(_path, _country_name,
                             start_date.strftime("%Y-%m-%d"), product)
        # exist_ok avoids the race between an exists() check and makedirs().
        os.makedirs(_path, exist_ok=True)

    # Download found products to the local folder
    sentinel5dl.download(products=result.get("products"), output_dir=_path)