Example #1
def __init__(self):
    print('Connect to Firebase...')
    self.db = firebase.Firebase(BASE_URL)
    self.db.uniquify()
    self.set_data()
    print('Create session...')
    self.create_session()
    # Torrent parameters shared by every download added to the session
    self.params = {
        'save_path': SAVE_PATH,
        'storage_mode': lt.storage_mode_t(2),
        'paused': False,
        'auto_managed': True,
        'duplicate_is_error': True
    }
    # list() is needed so the two dict views can be concatenated on Python 3
    create_dirs(list(DIRS.values()) + list(FINAL_DIRS.values()))
    self.add_torrents(self.undownloaded)
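The helpers create_session and add_torrents are not part of this excerpt. The following is only a minimal sketch of what they could look like with the classic libtorrent Python bindings; the class name, the listen_on port range, and the assumption that the torrent list holds paths to .torrent files are guesses, not the original implementation.

import libtorrent as lt

class TorrentManagerSketch(object):
    # Hypothetical stand-in for the class the excerpt's __init__ belongs to
    def __init__(self, params, torrent_files):
        self.params = params
        self.create_session()
        self.add_torrents(torrent_files)

    def create_session(self):
        # One libtorrent session listening on the usual BitTorrent port range
        self.session = lt.session()
        self.session.listen_on(6881, 6891)

    def add_torrents(self, torrent_files):
        # Queue every .torrent file with the shared parameter dict
        for path in torrent_files:
            params = dict(self.params)
            params['ti'] = lt.torrent_info(path)
            self.session.add_torrent(params)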
Example #3
    # Per-iteration metrics record; the listing starts mid-dict, so the
    # assignment to m_result below is reconstructed from the append() call
    m_result = {
        'TV': tv,
        'L2': l2,
        'inception_mean': is_m,
        'inception_std': is_std
    }
    metrics_data['results'].append(m_result)

    if 'logger' in metrics_data:
        _str = 'i:%d;\tE: %f;\tTV: %f;\tL2: %f;\tIS-mean: %f;\tIS-std: %f;' % \
               (iteration, e, tv, l2, is_m, is_std)
        metrics_data['logger'].info(_str)
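The fragment expects a metrics_data dict with a 'results' list and, optionally, a 'logger'. Its initialization is not shown in the listing; the following is only a plausible sketch:

# Assumed initialization of the metrics_data structure used above
metrics_data = {
    'results': [],     # one m_result dict appended per iteration
    'logger': logger,  # optional; .info() is called on it when present
}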


if __name__ == "__main__":
    opt = parse_args()
    create_dirs(opt)
    logger = setup_logger(opt.outdir)
    logger.info(opt)
    if opt.verbose:
        print(opt)

    # Fix the random seed; also call random.seed(opt.seed) if the random module is used
    torch.manual_seed(opt.seed)
    if opt.cuda:
        import torch.backends.cudnn as cudnn
        cudnn.benchmark = True
        torch.cuda.manual_seed(opt.seed)
        dtype = torch.cuda.FloatTensor
    else:
        dtype = torch.FloatTensor
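setup_logger is called above but not included in the listing. A minimal sketch using only the standard logging module, assuming it logs to stdout and to a file inside the output directory (the file name and handler setup are assumptions):

import logging
import os

def setup_logger(outdir, name='metrics'):
    # Hypothetical helper: log to stdout and to <outdir>/log.txt
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    fmt = logging.Formatter('%(asctime)s %(message)s')
    for handler in (logging.StreamHandler(),
                    logging.FileHandler(os.path.join(outdir, 'log.txt'))):
        handler.setFormatter(fmt)
        logger.addHandler(handler)
    return logger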
Example #4

    global img_dir
    global processes_num
    processes_num = download_processes_num
    img_dir = site_subdir_creator(img_dir)(download_dir)
    except_log_file = open(except_log_file_name, "w")
    start_time = time.time()
    discoverireland_data = process_site_layers(site_layers_description_list, except_log_file)
    print "discoverireland.ie scrapping time: ", str(time.time() - start_time)
    save_data(discoverireland_data, "discoverireland.dat")
    except_log_file.close()
    discoverireland_data = get_saved_data("discoverireland.dat")
    to_csv(discoverireland_data)
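save_data and get_saved_data are called above but not defined in the listing. A pickle-based sketch that matches the call sites (the serialization format is an assumption):

import pickle

def save_data(data, filename):
    # Serialize the scraped records to disk
    with open(filename, 'wb') as f:
        pickle.dump(data, f)

def get_saved_data(filename):
    # Load records previously written by save_data()
    with open(filename, 'rb') as f:
        return pickle.load(f)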


Example #5

if __name__ == "__main__":

    create_dirs(img_dir)
    exceptions_log_file = open("discoverireland_exceptions.log", "w")
    start_time = time.time()
    discoverireland_data = process_site_layers(site_layers_description_list, exceptions_log_file)
    print "discoverireland.ie scrapping time: ", str(time.time() - start_time)
    save_data(discoverireland_data, "discoverireland.dat")
    exceptions_log_file.close()

    discoverireland_data = get_saved_data("discoverireland.dat")
    to_csv(discoverireland_data)

    # exceptions_log_file = open('discoverireland_exceptions.log', 'w')
    # first_layer_processor(site_layers_description_list[0], exceptions_log_file)
    # exceptions_log_file.close()
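to_csv is also not shown in the listing. A minimal sketch assuming discoverireland_data is a list of dicts; both the record shape and the output file name are assumptions:

import csv

def to_csv(records, filename='discoverireland.csv'):
    # Write one row per record; column order comes from the first record
    if not records:
        return
    fieldnames = list(records[0].keys())
    with open(filename, 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(records)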