import concurrent.futures as cf
import time

import requests as rq

# load_url, load_yaml, store_response, track_time, load_responses, log,
# cm, and db are project helpers assumed to be imported elsewhere in
# this module.


def make_requests_old(ex, urls, database, table):
    """Fetch each URL on the executor and store successful responses."""
    start_time = time.time()
    session = rq.Session()
    futures = [ex.submit(load_url, session, url) for url in urls]
    # The crawler reference depends only on the table, so load it once
    # rather than re-reading the YAML file for every response.
    reference = load_yaml(cm.crawler_ref)[table]
    extract_dir = cm.crawl_extract_dir
    # Start the tally at 1 so progress reporting is not off by one and
    # the periodic load below does not fire on the first response.
    for tally, future in enumerate(cf.as_completed(futures), start=1):
        response = future.result()
        if response.status_code == 200:
            store_response(response.json(), reference, database, extract_dir)
            status = True
        else:
            status = False
            log.debug("Request failed: {0}".format(response.request.url))
        track_time(start_time, tally, len(urls), table, status)
        # Periodically flush extracted responses into the database.
        if tally % cm.load_rate == 0:
            load_responses(cm.crawl_extract_dir, database)
    db.export_files(database, cm.crawl_export_dir)
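
# A minimal usage sketch (hypothetical, not part of the original module):
# make_requests_old expects a concurrent.futures executor, so a caller
# would typically wrap the call in a ThreadPoolExecutor context manager.
# The crawl_table name and max_workers default are assumptions.
def crawl_table(urls, database, table, max_workers=8):
    with cf.ThreadPoolExecutor(max_workers=max_workers) as ex:
        make_requests_old(ex, urls, database, table)
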

def export():
    """Re-export the database files, clearing any previous export first."""
    # NOTE: export_dir and database_file are assumed to be module-level
    # config values (e.g. set from db.load_config()); they are not
    # defined in this snippet.
    db.clear_files(export_dir)
    db.export_files(database_file, export_dir)

def main():
    """Load the config, inspect the database, and export its files."""
    cm = db.load_config()
    database = cm.database_file
    tables = get_tables(database)
    # uuids is collected but never used below; presumably it feeds a
    # crawl step that is elided from this snippet.
    uuids = get_uuids(tables, database)
    db.export_files(database, cm.export_dir)
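
# Standard entry-point guard (added as a sketch) so the module can also
# be run directly as a script.
if __name__ == "__main__":
    main()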