def download_start_of_day_map(dtime):
    """Download the AIA 193 Å synoptic map for the start of *dtime*'s day.

    The file is fetched from the JSOC near-real-time synoptic archive and
    saved to ``map_path(dtime)``.

    Parameters
    ----------
    dtime : datetime.datetime
        Any time within the day of interest; truncated to 00:00 by
        ``start_of_day``.

    Raises
    ------
    RuntimeError
        If the download fails.
    """
    import parfive

    dtime = start_of_day(dtime)
    print(f'Fetching AIA map for {dtime}')
    # The Fido-based search below is broken for now, see
    # https://github.com/sunpy/sunpy/issues/4159
    # (kept as comments, not a stray string literal, so it is clearly dead code):
    #
    #   query = (a.Time(dtime, dtime + timedelta(days=1), dtime),
    #            a.Instrument('AIA'),
    #            a.Wavelength(193 * u.Angstrom))
    #   result = Fido.search(*query)
    #   try:
    #       mappath = Fido.fetch(result[0, 0])[0]
    #   except IndexError:
    #       raise RuntimeError(f'No AIA map available for {dtime}')
    #   mappath = pathlib.Path(mappath)
    #   mappath.replace(map_path(dtime))

    # Fall back to fetching the NRT synoptic FITS file directly from JSOC.
    dl = parfive.Downloader(max_conn=1)
    url = (f"http://jsoc2.stanford.edu/data/aia/synoptic/nrt/"
           f"{dtime.year}/{dtime.month:02}/{dtime.day:02}/"
           f"H0000/AIA{dtime.year}{dtime.month:02}{dtime.day:02}_"
           f"000000_0193.fits")
    dl.enqueue_file(url, filename=map_path(dtime))
    res = dl.download()
    if res.errors:
        print(res.errors)
        raise RuntimeError('Download failed')
def download_sample_data(overwrite=False):
    """
    Download all sample data at once. This will overwrite any existing files.

    Parameters
    ----------
    overwrite: `bool`
        Overwrite existing sample data.

    Returns
    -------
    `parfive.Results` or `list` of `pathlib.Path`
        The download results, or the paths of files that were already
        present locally when ``overwrite`` is `False`.
    """
    # Workaround for tox only. This is not supported as a user option
    sampledata_dir = os.environ.get("SUNPY_SAMPLEDIR", False)
    if sampledata_dir:
        sampledata_dir = Path(sampledata_dir).expanduser().resolve()
        _is_writable_dir(sampledata_dir)
    else:
        # Creating the directory for sample files to be downloaded
        sampledata_dir = Path(get_and_create_sample_dir())
    dl = parfive.Downloader(overwrite=overwrite)
    first_url = _base_urls[0]
    already_downloaded = []
    for file_name in _sample_files:
        url = urljoin(first_url, file_name)
        fname = sampledata_dir / file_name
        # We have to avoid calling download if we already have all the files.
        if fname.exists() and not overwrite:
            already_downloaded.append(fname)
        else:
            dl.enqueue_file(url, filename=fname)
    if dl.queued_downloads:
        results = dl.download()
    else:
        return already_downloaded
    if not results.errors:
        return results
    # Retry every failed file against each mirror URL in turn, stopping as
    # soon as one round of retries succeeds completely.
    for retry_url in _base_urls[1:]:
        for i, err in enumerate(results.errors):
            file_name = Path(err.url).name
            # Overwrite the parfive error to change the url to a mirror
            new_url = urljoin(retry_url, file_name)
            results._errors[i] = _error(err.filepath_partial, new_url, err.exception)
        results = dl.retry(results)
        if not results.errors:
            return results
    # All mirrors exhausted: warn about each file that could not be fetched.
    for err in results.errors:
        file_name = Path(err.url).name
        warnings.warn(f"File {file_name} not found.", SunpyUserWarning)
    return results
def _get_file(self, params, directory=None, overwrite=False):
    """Downloads a file and return the filepath to that file."""
    # Resolve the destination directory, defaulting to the configured
    # sunpy download location.
    if directory is None:
        target_dir = Path(sunpy.config.get('downloads', 'download_dir'))
    else:
        target_dir = Path(directory).expanduser().absolute()
    dl = parfive.Downloader(overwrite=overwrite)
    query = urllib.parse.urlencode(params)
    request_url = urllib.parse.urljoin(self._api, "?" + query)
    dl.enqueue_file(request_url, path=target_dir)
    results = dl.download()
    # Unwrap a single-file result for the caller's convenience.
    return results[0] if len(results) == 1 else results
def test_fetch_with_headers(httpserver, tmpdir, mocked_client):
    # Serve a fake payload whose filename arrives via Content-Disposition.
    request_matcher = httpserver.expect_request(
        "/download/asdf", query_string="datasetId=abcd")
    request_matcher.respond_with_data(
        b"This isn't an asdf",
        headers={"Content-Disposition": "attachment; filename=abcd.asdf"})
    # Point the client at the local test server instead of the real service.
    mocked_client._BASE_DOWNLOAD_URL = httpserver.url_for("/download")

    table = DKISTQueryResponseTable({'Dataset ID': ['abcd']})
    dl = parfive.Downloader()
    mocked_client.fetch(table, downloader=dl, path=tmpdir)

    # fetch() should only enqueue; exactly one download and the server-given
    # filename should be honoured once it runs.
    assert len(dl.http_queue) == 1
    fetched = dl.download()
    assert len(fetched) == 1, fetched.errors
    assert fetched[0] == str(tmpdir / "abcd.asdf")