def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    local_dir = local_base_dir / directory
    # NB: ``probe`` is not a parameter here; it comes from the enclosing scope.
    remote_dir = ('pub/helios-data/E2_experiment/'
                  'Data_Cologne_Nov2016_bestdata/'
                  'HR/helios{}'.format(probe))
    remote_url = 'ftp://' + remote_base_url + '/' + remote_dir

    original_fname = fname
    fname = None
    # Because the filename contains a number between 0 and 24 at the end,
    # get a list of all the filenames and compare them to the filename
    # we want
    with FTP(remote_base_url) as ftp:
        ftp.login()
        remote_fnames = ftp.nlst(remote_dir)
    for remote_fname in remote_fnames:
        if original_fname in remote_fname:
            fname = remote_fname
            break
    if fname is None:
        raise util.NoDataError

    util._download_remote(remote_url, fname, local_base_dir)

    # Rename to a sensible and deterministic file name
    downloaded_path = (local_base_dir / fname).with_suffix(extension)
    new_path = (local_base_dir / original_fname).with_suffix(extension)
    downloaded_path.rename(new_path)

def download(self, interval):
    url = self.base_url + str(self.local_dir(interval))
    try:
        util._download_remote(url,
                              self.fname(interval),
                              self.local_path(interval).parent)
    except urllib.error.HTTPError:
        raise util.NoDataError

def download(self, interval):
    url = omni_url + '/low_res_omni'
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    fname = self.fname(interval)
    util._download_remote(url, fname, local_dir)

def download(self, interval):
    filename = self.fname(interval)
    local_dir = self.local_path(interval).parent
    remote_base_url = spdf_url + str(self.local_dir(interval))
    print(filename)
    print(remote_base_url)
    util._download_remote(remote_base_url, filename, local_dir)

def download(self, interval):
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    year = interval.start.strftime('%Y')
    base_url = ('http://pds-ppi.igpp.ucla.edu/ditdos/download?'
                'id=pds://PPI/CO-E_SW_J_S-MAG-4-SUMM-1MINAVG-V2.0/DATA')
    url = '{}/{}'.format(base_url, year)
    util._download_remote(url, self.fname(interval), local_dir)

def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    remote_url = remote_base_url + str(directory)
    local_dir = local_base_dir / directory
    filename = fname + extension
    try:
        util._download_remote(remote_url, filename, local_dir)
    except URLError:
        raise util.NoDataError

def _download_ulysses(options, fname, local_dir):
    """Common downloading functionality"""
    dl_url = ulysses_url
    for key in options:
        dl_url += key + '=' + options[key] + '&'
    # Download data
    try:
        util._download_remote(dl_url, fname, local_dir)
    except urllib.error.HTTPError:
        raise util.NoDataError

def download(self, interval):
    remote_dir = (pathlib.Path('E3_experiment') /
                  'helios{}_6sec_ness'.format(self.probe) /
                  interval.start.strftime('%Y'))
    remote_url = f'{remote_base_url}{remote_dir}'
    try:
        util._download_remote(remote_url,
                              self.fname(interval),
                              self.local_path(interval).parent)
    except URLError:
        raise util.NoDataError

def download(self, interval):
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    year = interval.start.strftime('%Y')
    remote_dir = (pathlib.Path('E1_experiment') /
                  'New_proton_corefit_data_2017' /
                  'ascii' /
                  f'helios{self.probe}' /
                  f'{year}')
    remote_url = '{}{}'.format(remote_base_url, remote_dir)
    try:
        util._download_remote(remote_url, self.fname(interval), local_dir)
    except urllib.error.HTTPError:
        raise util.NoDataError

def download(self, interval):
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    fname = self.fname(interval)

    remote_base_url = ulysses_url
    # Copy so the module-level defaults are not mutated
    swics_options = url_options.copy()
    swics_options['FILE_NAME'] = fname
    swics_options['FILE_PATH'] = '/ufa/HiRes/data/swics'
    for key in swics_options:
        remote_base_url += key + '=' + swics_options[key] + '&'

    util._download_remote(remote_base_url, fname, local_dir)
    return self.local_path(interval)

def download(self, interval):
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    fname = self.fname(interval)
    yearstr = self.yearstr(interval)

    remote_base_url = ulysses_url
    # Copy so the module-level defaults are not mutated
    fgm_options = url_options.copy()
    fgm_options['FILE_NAME'] = fname
    fgm_options['FILE_PATH'] = '/ufa/HiRes/VHM-FGM/' + yearstr
    for key in fgm_options:
        remote_base_url += key + '=' + fgm_options[key] + '&'

    util._download_remote(remote_base_url, fname, local_dir)
    return self.local_path(interval)

def download(self, interval):
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    fname = self.fname(interval)
    yearstr = self.yearstr(interval)

    remote_base_url = ulysses_url
    # Copy so the module-level defaults are not mutated
    swoops_options = url_options.copy()
    year = fname[1:3]
    # doy = fname[5:8]
    swoops_options['FILE_NAME'] = fname
    swoops_options['FILE_PATH'] = (
        '/ufa/stageIngestArea/swoops/ions/bamion{}.zip_files'.format(year))
    for key in swoops_options:
        remote_base_url += key + '=' + swoops_options[key] + '&'

    util._download_remote(remote_base_url, fname, local_dir)
    return self.local_path(interval)

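# The SWICS, VHM/FGM and SWOOPS ``download`` methods above each build the
# same ``key=value&`` query string onto ``ulysses_url`` by hand, which is
# exactly what ``_download_ulysses`` already does.  As an illustrative
# sketch only (not part of the original modules), the shared URL
# construction could be factored out like this, or the three methods could
# simply pass their options dicts straight to ``_download_ulysses``:

def _ulysses_query_url(options):
    """Append ``key=value&`` pairs from ``options`` to ``ulysses_url``."""
    url = ulysses_url
    for key in options:
        url += key + '=' + options[key] + '&'
    return url
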
def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    # NB: ``probe`` is not a parameter here; it comes from the enclosing scope.
    remote_dir = ('E2_experiment/'
                  'Data_Cologne_Nov2016_bestdata/'
                  'HR/helios{}'.format(probe))
    remote_url = remote_base_url + '/' + remote_dir

    original_fname = fname
    fname = None
    # Because the filename contains a number between 0 and 24 at the end,
    # get a list of all the filenames and compare them to the filename
    # we want

    # new http functionality
    def get_file_list(url, ext='', params={}):
        response = requests.get(url, params=params)
        if response.ok:
            response_text = response.text
        else:
            return response.raise_for_status()
        soup = BeautifulSoup(response_text, 'html.parser')
        complete_file_list = [
            node.get('href') for node in soup.find_all('a')
            if node.get('href').endswith(ext)]
        return complete_file_list

    ext = 'asc'
    file_list = get_file_list(remote_url, ext)
    for filename in file_list:
        if original_fname in filename:
            fname = filename
            break
    if fname is None:
        raise util.NoDataError

    util._download_remote(remote_url, fname, local_base_dir)

    # Rename to a sensible and deterministic file name
    downloaded_path = (local_base_dir / fname).with_suffix(extension)
    new_path = (local_base_dir / original_fname).with_suffix(extension)
    downloaded_path.rename(new_path)

def download(self, interval):
    remote_dir = ('E2_experiment/'
                  'Data_Cologne_Nov2016_bestdata/'
                  'HR%20-%20High%20Resolution%204Hz%20Data/'
                  f'helios{self.probe}')
    remote_url = f'{remote_base_url}/{remote_dir}'

    local_fname = self.fname(interval)
    remote_fname = None
    # Because the filename contains a number between 0 and 24 at the end,
    # get a list of all the filenames and compare them to the filename
    # we want

    def get_file_list(url, ext='', params={}):
        response = requests.get(url, params=params)
        if response.ok:
            response_text = response.text
        else:
            return response.raise_for_status()
        soup = BeautifulSoup(response_text, 'html.parser')
        complete_file_list = [
            node.get('href') for node in soup.find_all('a')
            if node.get('href').endswith(ext)]
        return complete_file_list

    ext = 'asc'
    remote_file_list = get_file_list(remote_url, ext)
    for filename in remote_file_list:
        if local_fname[:-4] in filename:
            remote_fname = filename
            break
    if remote_fname is None:
        raise util.NoDataError

    dl_dir = self.local_path(interval).parent
    util._download_remote(remote_url, remote_fname, dl_dir)

    # Rename to a sensible and deterministic file name
    downloaded_path = (dl_dir / remote_fname)
    new_path = self.local_path(interval)
    downloaded_path.rename(new_path)

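# The two Helios E2 downloaders above define an identical ``get_file_list``
# helper inline.  A minimal sketch of a shared, module-level version
# (assuming ``import requests`` and ``from bs4 import BeautifulSoup`` at the
# top of the module; the name ``_get_remote_file_list`` is illustrative,
# not from the original code):

def _get_remote_file_list(url, ext='', params=None):
    """Return the hrefs of all links on ``url`` that end with ``ext``."""
    response = requests.get(url, params=params or {})
    # Raise for non-2xx responses instead of silently returning None
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html.parser')
    return [node.get('href') for node in soup.find_all('a')
            if node.get('href') and node.get('href').endswith(ext)]
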
def download(self, interval):
    local_dir = self.local_path(interval).parent
    local_dir.mkdir(parents=True, exist_ok=True)
    year = interval.start.strftime('%Y')
    doy = interval.start.strftime('%j')
    url = '{}/{}/{}'.format(self.base_url, year, doy)

    util._download_remote(url, self.fname(interval) + '.DAT', local_dir)
    util._download_remote(url, self.fname(interval) + '.LBL', local_dir)
    util._download_remote(url, self.sensor.upper() + '_V01.FMT', local_dir)

    create_caps_hdf5_file(local_dir / (self.fname(interval) + '.DAT'),
                          local_dir / (self.fname(interval) + '.LBL'),
                          local_dir / (self.sensor.upper() + '_V01.FMT'))

def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    url = remote_base_url + '/' + str(directory)
    util._download_remote(url, fname + extension, local_base_dir / directory)

def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    url = '{}'.format(remote_base_url)
    util._download_remote(url, fname + extension, local_base_dir / directory)

def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    filename = fname + extension
    local_dir = path.Path(local_base_dir) / directory
    util._download_remote(remote_base_url, filename, local_dir)

def download_func(remote_base_url, local_base_dir, directory, fname,
                  remote_fname, extension):
    remote_url = remote_base_url + str(directory)
    filename = fname + extension
    local_dir = local_base_dir / directory
    util._download_remote(remote_url, filename, local_dir)

def download(self, interval):
    filename = self.fname(interval)
    local_dir = self.local_path(interval).parent
    remote_base_url = imp_url + f'imp{self.probe}/merged'
    util._download_remote(remote_base_url, filename, local_dir)