Example #1
    def save(self):
        """
        Saves the session.

        This is done by saving a "session file" containing the cookies and any
        other credentials, as well as a cache file containing the API data.

        """

        session_file_path = config.CONFIG["session-path"]

        if self.user:
            logger.info("Saving session to %s.", session_file_path)

            try:
                if utils.prepare_directory(os.path.dirname(session_file_path)):
                    logger.info(
                        "Created directory(s) %s.",
                        os.path.dirname(session_file_path)
                    )

                with open(session_file_path, "wb") as f:
                    os.chmod(session_file_path, 0o600)
                    pickle.dump(
                        (
                            self.user,
                            requests.utils.dict_from_cookiejar(
                                self.requests_session.cookies
                            )
                        ),
                        f
                    )
            except IOError:
                logger.warning(
                    "Could not save session to %s.",
                    session_file_path,
                    exc_info=sys.exc_info()
                )

        api_info_file_path = config.CONFIG["api-info-path"]

        if self.api_info_raw:
            logger.info("Saving API info to %s.", api_info_file_path)

            try:
                if utils.prepare_directory(os.path.dirname(api_info_file_path)):
                    logger.info(
                        "Created directory(s) %s.",
                        os.path.dirname(api_info_file_path)
                    )

                with open(api_info_file_path, "wb") as f:
                    f.write(self.api_info_raw)
            except IOError:
                logger.warning(
                    "Could not save API info to %s.",
                    api_info_file_path,
                    exc_info=sys.exc_info()
                )
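
Every example on this page keys a "Created directory(s)" log message off the return value of utils.prepare_directory, and two of them pass force_delete=True. The helper itself never appears here; the following is a minimal sketch consistent with those call sites (the semantics are inferred, not the project's actual code):

import os
import shutil

def prepare_directory(path, force_delete=False):
    # Inferred contract: make sure `path` exists, returning True only
    # when this call actually created it. force_delete wipes any
    # existing directory first (as the force_delete=True call sites
    # in Examples #2 and #6 suggest).
    if force_delete and os.path.isdir(path):
        shutil.rmtree(path)
    if path and not os.path.isdir(path):
        os.makedirs(path)
        return True
    return False
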
Example #2
def main():

    folders = glob.glob(os.path.join(args.glob_root, '*'))
    folders = [f for f in folders if os.path.isdir(f) and 'combined-results' not in f]
    print('Experiment folders under {}:'.format(args.glob_root))
    for folder in folders: print('\t{}'.format(folder))

    keys = args.keys.split(',') # names of the log attributes in log files
    print('Attributes whose values will be combined:', keys)
    max_key_len = max([len(k) for k in keys])
    combined_logs = {}
    for k in keys: combined_logs[k] = []

    utils.prepare_directory(args.save_dir, force_delete=True)
    logger = utils.Logger(
        args.save_dir,
        'results',
        ['{}/mean'.format(k) for k in keys] +
        ['{}/std'.format(k) for k in keys])

    for folder in folders:
        
        log_file = os.path.join(folder, args.log_file) # log file in the experiment folder
        if not os.path.exists(log_file): continue

        print('\tLoading log file {} ...'.format(log_file))
        L = np.load(log_file)
        for k in keys:
            v = L[k]
            print('\t\tkey:{key:{width}s}, value.shape:{keyshape:15s}, value[-1]:{keyval:}'.format(
                key=k, width=max_key_len+1, keyshape=str(np.asarray(v).shape), keyval=v[-1]))
            if len(v.shape) < 2:
                v = v.reshape([-1, 1])
            combined_logs[k].append(v)

    for k in keys:
        combined_k = np.concatenate(combined_logs[k], axis=1)
        mean_k = combined_k.mean(axis=1)
        std_k = combined_k.std(axis=1)
        for lix, (mean, std) in enumerate(zip(mean_k, std_k)):
            logger.append(['{}/mean'.format(k), '{}/std'.format(k)], [mean, std], lix)
        
        print('\taverage {key:{width}s}: {mean:} +- {std:}'.format(
            key=k, width=max_key_len+1, mean=mean_k[-1], std=std_k[-1]))

    logger.close()
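
Example #2 indexes the object returned by np.load with attribute names, which implies each experiment writes an .npz archive of per-iteration arrays. A hedged sketch of a writer that would produce compatible input (the file name and keys are illustrative):

import numpy as np

# One array per logged attribute, aligned by iteration.
np.savez('logs.npz',
         loss=np.linspace(1.0, 0.1, 100),
         acc=np.linspace(0.5, 0.95, 100))

L = np.load('logs.npz')
print(L['loss'].shape)  # (100,); Example #2 reshapes 1-D logs to (100, 1)
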
Example #3
    def save(self):
        """
        Saves the session.

        This is done by saving a "session file" containing the cookies and any
        other credentials, as well as a cache file containing the API data.

        """

        session_file_path = config.CONFIG["session-path"]

        if self.user:
            logger.info("Saving session to %s.", session_file_path)

            try:
                if utils.prepare_directory(os.path.dirname(session_file_path)):
                    logger.info("Created directory(s) %s.",
                                os.path.dirname(session_file_path))

                with open(session_file_path, "wb") as f:
                    os.chmod(session_file_path, 0o600)
                    pickle.dump((self.user,
                                 requests.utils.dict_from_cookiejar(
                                     self.requests_session.cookies)), f)
            except IOError:
                logger.warn("Could not save session to %s.",
                            session_file_path,
                            exc_info=sys.exc_info())

        api_info_file_path = config.CONFIG["api-info-path"]

        if self.api_info_raw:
            logger.info("Saving API info to %s.", api_info_file_path)

            try:
                if utils.prepare_directory(
                        os.path.dirname(api_info_file_path)):
                    logger.info("Created directory(s) %s.",
                                os.path.dirname(api_info_file_path))

                with open(api_info_file_path, "wb") as f:
                    f.write(self.api_info_raw)
            except IOError:
                logger.warn("Could not save API Info to %s.",
                            api_info_file_path,
                            exc_info=sys.exc_info())
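
Examples #1 and #3 pickle the tuple (user, cookie_dict), but the matching load path is not shown. A sketch of what it could look like, assuming the same file layout (load_session is a hypothetical name):

import pickle

import requests

def load_session(session_file_path):
    # Hypothetical inverse of save(): restore the user and rebuild the
    # cookie jar that dict_from_cookiejar flattened.
    with open(session_file_path, "rb") as f:
        user, cookie_dict = pickle.load(f)
    session = requests.Session()
    session.cookies = requests.utils.cookiejar_from_dict(cookie_dict)
    return user, session
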
Example #4
    parser.add_argument('--data_dir', type=str, default='')
    parser.add_argument('--exp_dir', type=str, default='./test/')
    parser.add_argument('--ckpt_file', type=str, default='')
    parser.add_argument('--device', type=str, default='cuda')
    # parser.add_argument('--multi_gpu', action='store_true')
    parser.add_argument('--dataset', type=str, default='Simp')
    parser.add_argument('--test_split', type=float, default=0.2)
    parser.add_argument('--red_rate', type=float, default=0.0)
    parser.add_argument('--validation_split', type=float, default=0.0)
    parser.add_argument('--d_latent', type=int, default=256)
    parser.add_argument('--batch_size', type=int, default=32)
    parser.add_argument('--n_epochs', type=int, default=2000)
    parser.add_argument('--size', type=int, default=256)
    parser.add_argument('--logfile', type=str, default='test.txt')
    args = parser.parse_args()

    if args.device == 'cuda' and torch.cuda.is_available():
        from subprocess import call
        print('available gpus:')
        call([
            "nvidia-smi", "--format=csv",
            "--query-gpu=index,name,driver_version,memory.total,memory.used,memory.free"
        ])
        cudnn.benchmark = True
    else:
        args.device = 'cpu'
    utils.prepare_directory(args.exp_dir)
    utils.write_logs(args)
    configure(args.exp_dir)
    main(args)
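
Example #4 calls utils.write_logs(args) right after preparing the experiment directory; the helper is not shown anywhere on this page. One plausible sketch, assuming it simply records the parsed arguments for reproducibility (the JSON format is an assumption):

import json
import os

def write_logs(args):
    # Assumed behavior: persist the run configuration next to its outputs.
    with open(os.path.join(args.exp_dir, args.logfile), 'w') as f:
        json.dump(vars(args), f, indent=2, sort_keys=True)
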
Example #5
    def _download(self, url, file_name):
        """
        See :meth:`download`.

        """
        downloads_directory = config.CONFIG["downloads-directory"]
        if utils.prepare_directory(downloads_directory):
            logger.info(
                "Created directory(s) %s.",
                downloads_directory
            )

        # Find an available file path
        final_file_path = utils.find_available_file(
            os.path.join(downloads_directory, file_name)
        )
        final_file_name = os.path.basename(final_file_path)

        logger.debug("File will be saved to %s.", final_file_path)

        # Get a generator function that makes a pretty progress bar.
        bar = ui.progress_bar_indeterminate()

        # Actually try to grab the file from the server
        while True:
            ui.print_carriage(
                "%s Trying to download file... %s" %
                    (ui.progress_bar(0.0), " " * 30)
            )

            # Ask the server for the file
            try:
                file_request = self.requests_session.get(
                    url, timeout=1, stream=True, verify=_get_verify()
                )
            except requests.exceptions.Timeout:
                logger.info(
                    "Request timed out. Server did not accept connection after "
                    "1 second."
                )
                continue

            # If it's giving it to us...
            if file_request.status_code == 200:
                logger.debug(
                    "Response headers...\n%s",
                    pprint.pformat(file_request.headers, width=72)
                )

                if "content-length" in file_request.headers:
                    size = float(file_request.headers["content-length"])
                else:
                    logger.info("File is of unknown size.")

                    size = 0
                    ui.print_carriage(
                        ui.progress_bar(-1) + " Downloading file."
                    )

                # Download the file in chunks.
                chunk_size = 124
                downloaded = 0
                with open(final_file_path, "wb") as f:
                    for chunk in file_request.iter_content(chunk_size):
                        if size != 0:
                            downloaded += len(chunk)
                            ui.print_carriage(
                                ui.progress_bar(downloaded / size) +
                                " Downloading file."
                            )

                        f.write(chunk)

            # If the server got particularly angry at us...
            if (file_request.status_code == 500 or
                    ("X-CallSuccess" in file_request.headers and
                    file_request.headers["X-CallSuccess"] == "False")):
                logger.critical(
                    "500 response. The server encountered an error."
                )
                sys.exit(1)

            if file_request.status_code == requests.codes.ok:
                break

            # Make sure that the trying prompt appears for at least a moment or
            # so
            time.sleep(0.5)

            period = 0.1
            wait_for = 4
            for i in range(int(wait_for / period)):
                ui.print_carriage(
                    next(bar) + " Download not ready yet. Waiting."
                )

                time.sleep(period)

        print "File saved to %s." % utils.shorten_path(final_file_path)
Example #6
                    choices=['cuda', 'cpu'])
# classifier arguments
parser.add_argument('--lr', type=float, default=0.)
parser.add_argument('--wd', type=float, default=0.)
parser.add_argument('--batch_size', type=int, default=0)
parser.add_argument('--n_epoch', type=int, default=0)
args = parser.parse_args()

np.set_printoptions(linewidth=150, precision=4, suppress=True)
th.set_printoptions(linewidth=150, precision=4)

FN = th.from_numpy
join = os.path.join
logger = logging.getLogger()

utils.prepare_directory(args.exp_root, force_delete=True)
utils.init_logger(join(args.exp_root, 'program.log'))
utils.write_args(args)

dset = data.XianDataset(args.data_dir,
                        args.mode,
                        feature_norm=args.feature_norm)
_X_s_tr = FN(dset.X_s_tr).to(args.device)
_Y_s_tr = FN(dset.Y_s_tr).to(args.device)
_X_s_te = FN(dset.X_s_te).to(args.device)
_Y_s_te = FN(dset.Y_s_te).to(args.device)
_X_u_te = FN(dset.X_u_te).to(args.device)
_Y_u_te = FN(dset.Y_u_te).to(args.device)
_Cu = FN(dset.Cu).to(args.device)
_Sall = FN(dset.Sall).to(args.device)
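
Example #6 stages the Xian splits on the target device, and Example #7 below then draws endless minibatches from such tensors via next(...) on data.Iterator, optionally with sampling weights. A hedged sketch of an iterator matching that interface (the real class may differ):

import torch

class Iterator:
    # Endless sampler over aligned tensors, mirroring the call sites in
    # Example #7: Iterator([X, y], batch_size, sampling_weights=w).
    def __init__(self, tensors, batch_size, sampling_weights=None):
        self.tensors = tensors
        self.batch_size = batch_size
        self.weights = sampling_weights
        self.n = tensors[0].shape[0]

    def __iter__(self):
        return self

    def __next__(self):
        if self.weights is None:
            ix = torch.randint(self.n, (self.batch_size,))
        else:
            ix = torch.multinomial(self.weights, self.batch_size,
                                   replacement=True)
        ix = ix.to(self.tensors[0].device)
        return [t[ix] for t in self.tensors]
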
Example #7
def main():

    utils.prepare_directory(args.exp_dir, force_delete=False)
    utils.init_logger(join(args.exp_dir, 'program.log'))
    utils.write_args(args)

    # **************************************** load dataset ****************************************
    dset = data.XianDataset(args.data_dir,
                            args.mode,
                            feature_norm=args.feature_norm)
    _X_s_tr = FN(dset.X_s_tr).to(args.device)
    _Y_s_tr_ix = FN(dil(dset.Y_s_tr,
                        dset.Cs)).to(args.device)  # indexed labels
    _Ss = FN(dset.Sall[dset.Cs]).to(args.device)
    _Su = FN(dset.Sall[dset.Cu]).to(args.device)
    if args.d_noise == 0: args.d_noise = dset.d_attr

    # **************************************** create data loaders ****************************************
    _sampling_weights = None
    if args.dataset != 'SUN':
        _sampling_weights = data.compute_sampling_weights(
            dil(dset.Y_s_tr, dset.Cs)).to(args.device)
    xy_iter = data.Iterator([_X_s_tr, _Y_s_tr_ix],
                            args.batch_size,
                            sampling_weights=_sampling_weights)
    label_iter = data.Iterator([torch.arange(dset.n_Cs, device=args.device)],
                               args.batch_size)
    class_iter = data.Iterator([torch.arange(dset.n_Cs)], 1)

    # **************************************** per-class means and stds ****************************************
    # per class samplers and first 2 class moments
    per_class_iters = []
    Xs_tr_mean, Xs_tr_std = [], []
    Xs_te_mean, Xs_te_std = [], []
    Xu_te_mean, Xu_te_std = [], []
    for c_ix, c in enumerate(dset.Cs):
        # training samples of seen classes
        _inds = np.where(dset.Y_s_tr == c)[0]
        assert _inds.shape[0] > 0
        _X = dset.X_s_tr[_inds]
        Xs_tr_mean.append(_X.mean(axis=0, keepdims=True))
        Xs_tr_std.append(_X.std(axis=0, keepdims=True))

        if args.n_gm_iter > 0:
            _y = np.ones([_inds.shape[0]], np.int64) * c_ix
            per_class_iters.append(
                data.Iterator([FN(_X).to(args.device),
                               FN(_y).to(args.device)],
                              args.per_class_batch_size))

        # test samples of seen classes
        _inds = np.where(dset.Y_s_te == c)[0]
        assert _inds.shape[0] > 0
        _X = dset.X_s_te[_inds]
        Xs_te_mean.append(_X.mean(axis=0, keepdims=True))
        Xs_te_std.append(_X.std(axis=0, keepdims=True))

    # test samples of unseen classes
    for c_ix, c in enumerate(dset.Cu):
        _inds = np.where(dset.Y_u_te == c)[0]
        assert _inds.shape[0] > 0
        _X = dset.X_u_te[_inds]
        Xu_te_mean.append(_X.mean(axis=0, keepdims=True))
        Xu_te_std.append(_X.std(axis=0, keepdims=True))
    del _X, _inds, c_ix, c

    Xs_tr_mean = FN(np.concatenate(Xs_tr_mean, axis=0)).to(args.device)
    Xs_tr_std = FN(np.concatenate(Xs_tr_std, axis=0)).to(args.device)
    Xs_te_mean = FN(np.concatenate(Xs_te_mean, axis=0)).to(args.device)
    Xs_te_std = FN(np.concatenate(Xs_te_std, axis=0)).to(args.device)
    Xu_te_mean = FN(np.concatenate(Xu_te_mean, axis=0)).to(args.device)
    Xu_te_std = FN(np.concatenate(Xu_te_std, axis=0)).to(args.device)

    # **************************************** create networks ****************************************
    g_net = modules.get_generator(args.gen_type)(
        dset.d_attr, args.d_noise, args.n_g_hlayer, args.n_g_hunit,
        args.normalize_noise, args.dp_g, args.leakiness_g).to(args.device)
    g_optim = optim.Adam(g_net.parameters(),
                         args.gan_optim_lr_g,
                         betas=(args.gan_optim_beta1, args.gan_optim_beta2),
                         weight_decay=args.gan_optim_wd)

    d_net = modules.ConditionalDiscriminator(dset.d_attr, args.n_d_hlayer,
                                             args.n_d_hunit,
                                             args.d_normalize_ft, args.dp_d,
                                             args.leakiness_d).to(args.device)
    d_optim = optim.Adam(d_net.parameters(),
                         args.gan_optim_lr_d,
                         betas=(args.gan_optim_beta1, args.gan_optim_beta2),
                         weight_decay=args.gan_optim_wd)
    start_it = 1

    utils.model_info(g_net, 'g_net', args.exp_dir)
    utils.model_info(d_net, 'd_net', args.exp_dir)

    if args.n_gm_iter > 0:
        if args.clf_type == 'bilinear-comp':
            clf = classifiers.BilinearCompatibility(dset.d_ft, dset.d_attr,
                                                    args)
        elif args.clf_type == 'mlp':
            clf = classifiers.MLP(dset.d_ft, dset.n_Cs, args)
        utils.model_info(clf.net, 'clf', args.exp_dir)

    pret_clf = None
    if os.path.isfile(args.pretrained_clf_ckpt):
        logger.info('Loading pre-trained {} checkpoint at {} ...'.format(
            args.clf_type, args.pretrained_clf_ckpt))
        ckpt = torch.load(args.pretrained_clf_ckpt, map_location=args.device)
        pret_clf = classifiers.BilinearCompatibility(dset.d_ft, dset.d_attr,
                                                     args)
        pret_clf.net.load_state_dict(ckpt[args.clf_type])
        pret_clf.net.eval()
        for p in pret_clf.net.parameters():
            p.requires_grad = False

    pret_regg = None
    if os.path.isfile(args.pretrained_regg_ckpt):
        logger.info(
            'Loading pre-trained regressor checkpoint at {} ...'.format(
                args.pretrained_regg_ckpt))
        ckpt = torch.load(args.pretrained_regg_ckpt, map_location=args.device)
        pret_regg = classifiers.Regressor(args, dset.d_ft, dset.d_attr)
        pret_regg.net.load_state_dict(ckpt['regressor'])
        pret_regg.net.eval()
        for p in pret_regg.net.parameters():
            p.requires_grad = False

    training_log_titles = [
        'd/loss',
        'd/real',
        'd/fake',
        'd/penalty',
        'gm/loss',
        'gm/real_loss',
        'gm/fake_loss',
        'g/fcls_loss',
        'g/cycle_loss',
        'clf/train_loss',
        'clf/train_acc',
        'mmad/X_s_tr',
        'mmad/X_s_te',
        'mmad/X_u_te',
        'smad/X_s_tr',
        'smad/X_s_te',
        'smad/X_u_te',
    ]
    if args.n_gm_iter > 0:
        training_log_titles.extend([
            'grad-cossim/{}'.format(n) for n, p in clf.net.named_parameters()
        ])
        training_log_titles.extend(
            ['grad-mse/{}'.format(n) for n, p in clf.net.named_parameters()])
    training_logger = utils.Logger(os.path.join(args.exp_dir, 'training-logs'),
                                   'logs', training_log_titles)

    t0 = time.time()

    logger.info('penguenler olmesin')  # Turkish: "may the penguins not die"
    for it in range(start_it, args.n_iter + 1):

        # **************************************** Discriminator updates ****************************************
        for p in d_net.parameters():
            p.requires_grad = True
        for p in g_net.parameters():
            p.requires_grad = False
        for _ in range(args.n_d_iter):
            x_real, y_ix = next(xy_iter)
            s = _Ss[y_ix]
            x_fake = g_net(s)

            d_real = d_net(x_real, s).mean()
            d_fake = d_net(x_fake, s).mean()
            d_penalty = modules.gradient_penalty(d_net, x_real, x_fake, s)
            d_loss = d_fake - d_real + args.L * d_penalty

            d_optim.zero_grad()
            d_loss.backward()
            d_optim.step()

            training_logger.update_meters(
                ['d/real', 'd/fake', 'd/loss', 'd/penalty'], [
                    d_real.mean().item(),
                    d_fake.mean().item(),
                    d_loss.item(),
                    d_penalty.item()
                ], x_real.size(0))

        # **************************************** Generator updates ****************************************
        for p in d_net.parameters():
            p.requires_grad = False
        for p in g_net.parameters():
            p.requires_grad = True
        g_optim.zero_grad()

        [y_fake] = next(label_iter)
        s = _Ss[y_fake]
        x_fake = g_net(s)

        # wgan loss
        d_fake = d_net(x_fake, s).mean()
        g_wganloss = -d_fake

        # f-cls loss
        fcls_loss = 0.0
        if pret_clf is not None:
            fcls_loss = pret_clf.loss(x_fake, _Ss, y_fake)
            training_logger.update_meters(['g/fcls_loss'], [fcls_loss.item()],
                                          x_fake.size(0))

        # cycle-loss
        cycle_loss = 0.0
        if pret_regg is not None:
            cycle_loss = pret_regg.loss(x_fake, s)
            training_logger.update_meters(['g/cycle_loss'],
                                          [cycle_loss.item()], x_fake.size(0))

        g_loss = args.C * fcls_loss + args.R * cycle_loss + g_wganloss
        g_loss.backward()

        # gmn iterations
        for _ in range(args.n_gm_iter):
            c = next(class_iter)[0].item()
            x_real, y_real = next(per_class_iters[c])
            y_fake = y_real.detach().repeat(args.gm_fake_repeat)
            s = _Ss[y_fake]
            x_fake = g_net(s)

            # gm loss
            clf.net.zero_grad()
            if args.clf_type == 'bilinear-comp':
                real_loss = clf.loss(x_real, _Ss, y_real)
                fake_loss = clf.loss(x_fake, _Ss, y_fake)
            elif args.clf_type == 'mlp':
                real_loss = clf.loss(x_real, y_real)
                fake_loss = clf.loss(x_fake, y_fake)

            grad_cossim = []
            grad_mse = []
            for n, p in clf.net.named_parameters():
                # if len(p.shape) == 1: continue

                real_grad = grad([real_loss], [p],
                                 create_graph=True,
                                 only_inputs=True)[0]
                fake_grad = grad([fake_loss], [p],
                                 create_graph=True,
                                 only_inputs=True)[0]

                if len(p.shape) > 1:
                    _cossim = F.cosine_similarity(fake_grad, real_grad,
                                                  dim=1).mean()
                else:
                    _cossim = F.cosine_similarity(fake_grad, real_grad, dim=0)

                # _cossim = F.cosine_similarity(fake_grad, real_grad, dim=1).mean()
                _mse = F.mse_loss(fake_grad, real_grad)
                grad_cossim.append(_cossim)
                grad_mse.append(_mse)

                training_logger.update_meters(
                    ['grad-cossim/{}'.format(n), 'grad-mse/{}'.format(n)],
                    [_cossim.item(), _mse.item()], x_real.size(0))

            grad_cossim = torch.stack(grad_cossim)
            grad_mse = torch.stack(grad_mse)
            gm_loss = (1.0 -
                       grad_cossim).sum() * args.Q + grad_mse.sum() * args.Z
            gm_loss.backward()

            training_logger.update_meters(
                ['gm/real_loss', 'gm/fake_loss'],
                [real_loss.item(), fake_loss.item()], x_real.size(0))

        g_optim.step()

        # **************************************** Classifier update ****************************************
        if args.n_gm_iter > 0:
            if it % args.clf_reset_iter == 0:
                if args.clf_reset_iter == 1:
                    # no need to generate optimizer each time
                    clf.init_params()
                else:
                    clf.reset()
            else:
                x, y_ix = next(xy_iter)
                if args.clf_type == 'bilinear-comp':
                    clf_acc, clf_loss = clf.train_step(x, _Ss, y_ix)
                else:
                    clf_acc, clf_loss = clf.train_step(x, y_ix)
                training_logger.update_meters(
                    ['clf/train_loss', 'clf/train_acc'], [clf_loss, clf_acc],
                    x.size(0))

        # **************************************** Log ****************************************
        if it % 1000 == 0:
            g_net.eval()

            # synthesize samples for seen classes and compute their first 2 moments
            Xs_fake_mean, Xs_fake_std = [], []
            with torch.no_grad():
                for c in range(dset.n_Cs):
                    y = torch.ones(256, device=args.device,
                                   dtype=torch.long) * c
                    a = _Ss[y]
                    x_fake = g_net(a)
                    Xs_fake_mean.append(x_fake.mean(dim=0, keepdim=True))
                    Xs_fake_std.append(x_fake.std(dim=0, keepdim=True))
            Xs_fake_mean = torch.cat(Xs_fake_mean)
            Xs_fake_std = torch.cat(Xs_fake_std)

            # synthesize samples for unseen classes and compute their first 2 moments
            def compute_firsttwo_moments(S, n_classes):
                X_mean, X_std = [], []
                with torch.no_grad():
                    for c in range(n_classes):
                        y = torch.ones(
                            256, device=args.device, dtype=torch.long) * c
                        a = S[y]
                        x_fake = g_net(a)
                        X_mean.append(x_fake.mean(dim=0, keepdim=True))
                        X_std.append(x_fake.std(dim=0, keepdim=True))
                return torch.cat(X_mean), torch.cat(X_std)

            Xu_fake_mean, Xu_fake_std = compute_firsttwo_moments(
                _Su, dset.n_Cu)

            g_net.train()

            training_logger.update_meters([
                'mmad/X_s_tr', 'smad/X_s_tr', 'mmad/X_s_te', 'smad/X_s_te',
                'mmad/X_u_te', 'smad/X_u_te'
            ], [
                torch.abs(Xs_tr_mean - Xs_fake_mean).sum(dim=1).mean().item(),
                torch.abs(Xs_tr_std - Xs_fake_std).sum(dim=1).mean().item(),
                torch.abs(Xs_te_mean - Xs_fake_mean).sum(dim=1).mean().item(),
                torch.abs(Xs_te_std - Xs_fake_std).sum(dim=1).mean().item(),
                torch.abs(Xu_te_mean - Xu_fake_mean).sum(dim=1).mean().item(),
                torch.abs(Xu_te_std - Xu_fake_std).sum(dim=1).mean().item()
            ])

            training_logger.flush_meters(it)

            elapsed = time.time() - t0
            per_iter = elapsed / it
            apprx_rem = (args.n_iter - it) * per_iter
            logging.info('Iter:{:06d}/{:06d}, '\
                         '[ET:{:.1e}(min)], ' \
                         '[IT:{:.1f}(ms)], ' \
                         '[REM:{:.1e}(min)]'.format(
                            it, args.n_iter, elapsed / 60., per_iter * 1000., apprx_rem / 60))

        if it % 10000 == 0:
            utils.save_checkpoint(
                {
                    'g_net': g_net.state_dict(),
                    'd_net': d_net.state_dict(),
                    'g_optim': g_optim.state_dict(),
                    'd_optim': d_optim.state_dict(),
                    'iteration': it
                },
                args.exp_dir,
                None,
                it if it % (args.n_iter // args.n_ckpt) == 0 else None,
            )

    training_logger.close()
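
The gradient-matching ('gm') iterations in Example #7 compare per-parameter gradients of a real-batch loss and a fake-batch loss, then backpropagate through that comparison. A standalone illustration of the pattern with a single weight matrix (toy data, not the model above):

import torch
import torch.nn.functional as F
from torch.autograd import grad

w = torch.randn(4, 3, requires_grad=True)  # stand-in classifier weight
x_real, x_fake = torch.randn(8, 3), torch.randn(8, 3)

real_loss = (x_real @ w.t()).pow(2).mean()
fake_loss = (x_fake @ w.t()).pow(2).mean()

# create_graph=True keeps these gradients differentiable, so the
# matching loss below can itself be backpropagated (in Example #7,
# into the generator that produced x_fake).
real_grad = grad(real_loss, w, create_graph=True)[0]
fake_grad = grad(fake_loss, w, create_graph=True)[0]

gm_loss = ((1.0 - F.cosine_similarity(fake_grad, real_grad, dim=1)).mean()
           + F.mse_loss(fake_grad, real_grad))
gm_loss.backward()
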
Example #8
    def _download(self, url, file_name):
        """
        See :meth:`download`.

        """
        downloads_directory = config.CONFIG["downloads-directory"]
        if utils.prepare_directory(downloads_directory):
            logger.info("Created directory(s) %s.", downloads_directory)

        # Find an available file path
        final_file_path = utils.find_available_file(
            os.path.join(downloads_directory, file_name))
        final_file_name = os.path.basename(final_file_path)

        logger.debug("File will be saved to %s.", final_file_path)

        # Get a generator function that makes a pretty progress bar.
        bar = ui.progress_bar_indeterminate()

        # Actually try to grab the file from the server
        while True:
            ui.print_carriage("%s Trying to download file... %s" %
                              (ui.progress_bar(0.0), " " * 30))

            # Ask the server for the file
            try:
                file_request = self.requests_session.get(url,
                                                         timeout=1,
                                                         stream=True,
                                                         verify=_get_verify())
            except requests.exceptions.Timeout:
                logger.info(
                    "Request timed out. Server did not accept connection after "
                    "1 second.")

            # If it's giving it to us...
            if file_request.status_code == 200:
                logger.debug("Response headers...\n%s",
                             pprint.pformat(file_request.headers, width=72))

                if "content-length" in file_request.headers:
                    size = float(file_request.headers["content-length"])
                else:
                    logger.info("File is of unknown size.")

                    size = 0
                    ui.print_carriage(
                        ui.progress_bar(-1) + " Downloading file.")

                # Download the file in chunks.
                chunk_size = 124
                downloaded = 0
                with open(final_file_path, "wb") as f:
                    for chunk in file_request.iter_content(chunk_size):
                        if size != 0:
                            downloaded += len(chunk)
                            ui.print_carriage(
                                ui.progress_bar(downloaded / size) +
                                " Downloading file.")

                        f.write(chunk)

            # If the server got particularly angry at us...
            if (file_request.status_code == 500
                    or ("X-CallSuccess" in file_request.headers
                        and file_request.headers["X-CallSuccess"] == "False")):
                logger.critical(
                    "500 response. The server encountered an error.")
                sys.exit(1)

            if file_request.status_code == requests.codes.ok:
                break

            # Make sure that the trying prompt appears for at least a moment or
            # so
            time.sleep(0.5)

            period = 0.1
            wait_for = 4
            for i in range(int(wait_for / period)):
                ui.print_carriage(
                    next(bar) + " Download not ready yet. Waiting.")

                time.sleep(period)

        print "File saved to %s." % utils.shorten_path(final_file_path)