def do_scrape(
    download_dir="download", download_processes_num=8, except_log_file_name="request_exceptions_discoverireland.log"
):
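    """Scrape discoverireland.ie end to end: crawl the layers described by
    site_layers_description_list, save the result to discoverireland.dat, and
    export it to CSV. Images go into a site subdirectory of download_dir,
    download_processes_num sets the global worker-process count, and failed
    requests are logged to except_log_file_name.
    """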
    global img_dir
    global processes_num
    processes_num = download_processes_num
    # Point image downloads at a site-specific subdirectory of download_dir.
    img_dir = site_subdir_creator(img_dir)(download_dir)
    except_log_file = open(except_log_file_name, "w")
    start_time = time.time()
    # Crawl the site layer by layer, logging failed requests as we go.
    discoverireland_data = process_site_layers(site_layers_description_list, except_log_file)
    print "discoverireland.ie scraping time: ", str(time.time() - start_time)
    save_data(discoverireland_data, "discoverireland.dat")
    except_log_file.close()
    # Reload the saved data and export it to CSV.
    discoverireland_data = get_saved_data("discoverireland.dat")
    to_csv(discoverireland_data)
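
# Example call (a minimal sketch; the process count below is illustrative):
# do_scrape(download_dir="download", download_processes_num=4)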


if __name__ == "__main__":

    # Standalone run: same pipeline as do_scrape, but creates img_dir directly.
    create_dirs(img_dir)
    exceptions_log_file = open("discoverireland_exceptions.log", "w")
    start_time = time.time()
    discoverireland_data = process_site_layers(site_layers_description_list, exceptions_log_file)
    print "discoverireland.ie scraping time: ", str(time.time() - start_time)
    save_data(discoverireland_data, "discoverireland.dat")
    exceptions_log_file.close()

    # Reload the saved data and export it to CSV.
    discoverireland_data = get_saved_data("discoverireland.dat")
    to_csv(discoverireland_data)

    # Debug variant: process only the first site layer.
    # exceptions_log_file = open('discoverireland_exceptions.log', 'w')
    # first_layer_processor(site_layers_description_list[0], exceptions_log_file)
    # exceptions_log_file.close()