def fetch(state, datefilter='', unprocessed=False):
    """Scrape data files and store them in the local file cache under a
    standardized name.

    State is required. Optionally provide 'datefilter' to limit files that
    are fetched, and unprocessed=True to fetch only the unprocessed data
    files (when the state's datasource supports them).
    """
    mod = load_module(state, ['datasource', 'fetch'])
    source = mod.datasource.Datasource()
    # Prefer the state-specific fetcher when the state module provides one;
    # otherwise fall back to the generic fetcher.
    results_fetcher = (mod.fetch.FetchResults()
                       if hasattr(mod, 'fetch')
                       else BaseFetcher(state))
    if not unprocessed:
        pairs = source.filename_url_pairs(datefilter)
    else:
        try:
            pairs = source.unprocessed_filename_url_pairs(datefilter)
        except NotImplementedError:
            # Not every datasource exposes unprocessed files; bail out with
            # a user-facing explanation.
            sys.exit("No unprocessed data files are available. Try running this "
                     "task without the --unprocessed option.")
    for name, target_url in pairs:
        results_fetcher.fetch(target_url, name)
def fetch(state, datefilter='', unprocessed=False):
    """Scrape raw data files and store in local file cache under standardized name.

    State is required. Optionally provide 'datefilter' to limit files that
    are fetched. Pass unprocessed=True to fetch only the unprocessed data
    files, when the state's datasource supports them (exits with an
    explanatory message when it does not).
    """
    state_mod = load_module(state, ['datasource', 'fetch'])
    datasrc = state_mod.datasource.Datasource()
    # Use the state's own fetcher when it defines one; otherwise fall back
    # to the generic fetcher.
    if hasattr(state_mod, 'fetch'):
        fetcher = state_mod.fetch.FetchResults()
    else:
        fetcher = BaseFetcher(state)
    if unprocessed:
        try:
            filename_url_pairs = datasrc.unprocessed_filename_url_pairs(datefilter)
        except NotImplementedError:
            # SystemExit is a builtin (sys.exit raises exactly this), so no
            # sys import is required by this block.
            raise SystemExit(
                "No unprocessed data files are available. Try running this "
                "task without the --unprocessed option.")
    else:
        filename_url_pairs = datasrc.filename_url_pairs(datefilter)
    for std_filename, url in filename_url_pairs:
        fetcher.fetch(url, std_filename)