def get_tile_for(args):
    """Fetch one DEM tile into the local 'DEM' directory.

    args is a (dem_name, tile_name) pair. The DEM named dem_name is looked
    up in the DEM list; the tile's GeoTIFF is downloaded when the DEM
    location is an HTTP URL, otherwise copied from the filesystem.
    """
    dem_name, tile_name = args
    output_dir = 'DEM'
    for dem in get_dem_list():
        # Skip entries for other DEMs; only the matching name is fetched.
        if dem['name'] != dem_name:
            continue
        source_file = os.path.join(dem['location'], tile_name) + '.tif'
        if source_file.startswith('http'):
            download_file(source_file, directory=output_dir)
        else:
            shutil.copy(source_file, output_dir)
def main_v2():
    """Command-line entry point: download a granule, run GAMMA RTC on it,
    zip the product, and (when --bucket is given) publish the zip plus
    browse/thumbnail PNGs to S3.
    """
    parser = ArgumentParser()
    parser.add_argument('--username', required=True)
    parser.add_argument('--password', required=True)
    parser.add_argument('--bucket')
    parser.add_argument('--bucket-prefix', default='')
    parser.add_argument('granule')
    args = parser.parse_args()

    logging.basicConfig(
        format='%(asctime)s - %(levelname)s - %(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p',
        level=logging.INFO,
    )

    # Earthdata credentials are written to ~/.netrc so downloads authenticate.
    write_netrc_file(args.username, args.password)

    granule_url = get_download_url(args.granule)
    granule_zip_file = download_file(granule_url, chunk_size=5242880)

    output_folder, product_name = rtc_sentinel_gamma(granule_zip_file)
    # Rename the working folder to the final product name before archiving.
    os.rename(output_folder, product_name)
    output_zip = make_archive(base_name=product_name, format='zip', base_dir=product_name)

    if args.bucket:
        upload_file_to_s3(output_zip, args.bucket, args.bucket_prefix)
        for browse in glob.glob(f'{product_name}/*.png'):
            thumbnail = create_thumbnail(browse)
            upload_file_to_s3(browse, args.bucket, args.bucket_prefix + '/browse')
            upload_file_to_s3(thumbnail, args.bucket, args.bucket_prefix + '/thumbnail')
def process_rtc_gamma(cfg, n):
    """Run the GAMMA RTC workflow for one HyP3 job described by cfg.

    Validates the granule name and requested resolution, downloads the
    granule, runs rtc_sentinel_gamma, optionally strips the DEM and
    incidence-angle rasters, zips the product, uploads it, and records
    success in the database. Any exception is logged, reported to the
    user via failure(), and the working directory is always cleaned up.
    """
    try:
        logging.info(
            f'Processing GAMMA RTC "{cfg["sub_name"]}" for "{cfg["username"]}"'
        )
        granule = cfg['granule']
        # BUG FIX: the original pattern 'S1[AB]_.._[SLC|GRD]' used a character
        # class, which matches any single character in {S,L,C,|,G,R,D} — so
        # e.g. product types starting with 'R' or 'G' slipped through.
        # A group with alternation is what was intended.
        if not re.match(r'S1[AB]_.._(SLC|GRD)', granule):
            raise GranuleError(
                f'Invalid granule, only S1 SLC and GRD data are supported: {granule}'
            )

        res = get_extra_arg(cfg, 'resolution', '30m')
        if res not in ('10m', '30m'):
            raise ValueError(
                f'Invalid resolution, valid options are 10m or 30m: {res}')

        granule_url = get_download_url(granule)
        granule_zip_file = download_file(granule_url, chunk_size=5242880)

        args = {
            'in_file': granule_zip_file,
            'res': float(res.rstrip('m')),
            'match_flag': extra_arg_is(cfg, 'matching', 'yes'),
            'pwr_flag': extra_arg_is(cfg, 'power', 'yes'),
            'gamma_flag': extra_arg_is(cfg, 'gamma0', 'yes'),
            'lo_flag': res == '30m',
            'filter_flag': extra_arg_is(cfg, 'filter', 'yes'),
        }
        product_dir, product_name = rtc_sentinel_gamma(**args)

        logging.info(f'Renaming {product_dir} to {product_name}')
        os.rename(product_dir, product_name)
        product_dir = product_name

        # Optional rasters are removed when the user opted out of them.
        if extra_arg_is(cfg, 'include_dem', 'no'):
            find_and_remove(product_dir, '*_dem.tif*')
        if extra_arg_is(cfg, 'include_inc', 'no'):
            find_and_remove(product_dir, '*_inc_map.tif*')

        zip_file = make_archive(base_name=product_dir, format='zip', base_dir=product_dir)
        cfg['final_product_size'] = [
            os.stat(zip_file).st_size,
        ]
        cfg['attachment'] = find_png(product_dir)
        cfg['email_text'] = ' '

        with get_db_connection('hyp3-db') as conn:
            upload_product(zip_file, cfg, conn)
            success(conn, cfg)

    except Exception as e:
        # Top-level job boundary: log the full traceback and notify the user
        # rather than crashing the processing daemon.
        logging.exception('Processing failed')
        logging.info('Notifying user')
        failure(cfg, str(e))

    cleanup_workdir(cfg)
def get_granule(granule):
    """Download the named granule, unpack it in-place, and return the
    resulting '<granule>.SAFE' directory name. The zip is deleted after
    extraction.
    """
    zip_file = download_file(get_download_url(granule))
    log.info(f'Unzipping {zip_file}')
    with ZipFile(zip_file) as archive:
        archive.extractall()
    os.remove(zip_file)
    return f'{granule}.SAFE'
def _download_and_verify_orbit(url: str, directory: str = ''):
    """Download an OPOD orbit file from url into directory and validate it.

    Returns the path of the downloaded orbit file.

    Raises:
        OrbitDownloadError: if the downloaded file fails OPOD verification.
    """
    orbit_file = download_file(url, directory=directory)
    try:
        verify_opod(orbit_file)
    except ValueError as e:
        # FIX: chain the underlying validation error ("from e") so the root
        # cause is preserved in the traceback instead of being discarded.
        raise OrbitDownloadError(f'Downloaded an invalid orbit file {orbit_file}') from e
    return orbit_file
def test_download_file(safe_data, tmp_path):
    """fetch.download_file saves the response body to directory/<url basename>."""
    with open(os.path.join(safe_data, 'granule_name.txt')) as handle:
        expected_content = handle.read()

    url = 'http://hyp3.asf.alaska.edu/foobar.txt'
    responses.add(
        responses.GET,
        url,
        body=expected_content,
        status=200,
    )

    result = fetch.download_file(url, directory=tmp_path)

    assert result == os.path.join(tmp_path, 'foobar.txt')
    assert os.path.exists(result)
    with open(result) as handle:
        assert handle.read() == expected_content
def test_download_file_none():
    """Passing url=None must raise requests.exceptions.InvalidURL."""
    with pytest.raises(requests.exceptions.InvalidURL):
        fetch.download_file(url=None)