Ejemplo n.º 1
0
def make_requests_old(ex, urls, database, table):
    """Fetch *urls* concurrently and store each successful JSON response.

    Args:
        ex: a concurrent.futures executor used to fan out the requests.
        urls: iterable of URLs to fetch.
        database: database handle/path passed through to the storage helpers.
        table: table name; selects the extraction reference from the crawler
            YAML config.

    Side effects: writes extracted responses to ``cm.crawl_extract_dir``,
    periodically loads them into *database* and exports files to
    ``cm.crawl_export_dir``. Failed requests are logged, not raised.
    """
    start_time = time.time()
    # The extraction reference is loop-invariant: parse the YAML config once
    # instead of re-reading it for every successful response.
    reference = load_yaml(cm.crawler_ref)[table]
    extract_dir = cm.crawl_extract_dir
    # Close the session (and its pooled connections) when done.
    with rq.Session() as session:
        futures = [ex.submit(load_url, session, url) for url in urls]
        for tally, future in enumerate(cf.as_completed(futures)):
            response = future.result()
            if response.status_code == 200:
                store_response(response.json(), reference, database, extract_dir)
                status = True
            else:
                status = False
                # Lazy %-formatting: only built if DEBUG logging is enabled.
                log.debug("Request failed: %s", response.request.url)
            track_time(start_time, tally, len(urls), table, status)
            # Periodically flush extracted responses into the database and
            # export. NOTE(review): fires at tally == 0 as well — confirm the
            # first-iteration flush is intentional.
            if tally % cm.load_rate == 0:
                load_responses(cm.crawl_extract_dir, database)
                db.export_files(database, cm.crawl_export_dir)
Ejemplo n.º 2
0
def export():
    """Wipe the export directory, then export the database files into it.

    Relies on the module-level ``database_file`` and ``export_dir`` globals.
    """
    target = export_dir
    db.clear_files(target)
    db.export_files(database_file, target)
Ejemplo n.º 3
0
def main():
    """Entry point: load config, enumerate tables/uuids, export DB files."""
    config = db.load_config()
    db_file = config.database_file
    table_names = get_tables(db_file)
    uuids = get_uuids(table_names, db_file)
    db.export_files(db_file, config.export_dir)