from __future__ import print_function, division
import torch
import numpy as np
import torch.nn as nn
import os
import shutil
from sklearn.metrics.pairwise import euclidean_distances
import torch.nn.functional as F
import Options
config = Options.Config()


def to_np(x):
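    """Move a tensor (or autograd Variable) to the CPU and return its data as a numpy array."""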
    return x.data.cpu().numpy()


def save_checkpoint(state,
                    epoch,
                    is_best,
                    filename=config.name + '_checkpoint.pth.tar'):
    # create the checkpoint directory on first use; makedirs also creates any
    # missing parent directories
    if not os.path.exists(config.checkpoints_dir):
        os.makedirs(config.checkpoints_dir)
    torch.save(
        state, os.path.join(config.checkpoints_dir,
                            str(epoch) + "_" + filename))
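    # additionally mirror this checkpoint as the running "best" model
    # (copied to <config.name>_model_best.pth.tar in the working directory)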
    if is_best:
        shutil.copyfile(
            os.path.join(config.checkpoints_dir,
                         str(epoch) + "_" + filename),
            config.name + '_model_best.pth.tar')
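# Example usage (illustrative sketch only; `model`, `optimizer`, `acc`, and
# `best_acc` are placeholders from a hypothetical training loop, not part of
# this module):
#
#     state = {
#         'epoch': epoch,
#         'state_dict': model.state_dict(),
#         'optimizer': optimizer.state_dict(),
#     }
#     save_checkpoint(state, epoch, is_best=(acc > best_acc))
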
from __future__ import print_function, division
import torch
import torch.nn as nn
import embedding_utils
from torch.autograd import Variable
import random
import Options

opt = Options.Config()


class GANLoss(nn.Module):
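    """GAN objective that supplies real/fake target labels shaped like the
    discriminator output. With use_lsgan=True the criterion is MSELoss
    (LSGAN); otherwise BCELoss, which expects the discriminator to emit
    probabilities (e.g. via a final sigmoid)."""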
    def __init__(self,
                 use_lsgan=False,
                 target_real_label=1.0,
                 target_fake_label=0.0,
                 tensor=torch.FloatTensor,
                 softlabel=False):
        super(GANLoss, self).__init__()
        self.real_label = target_real_label
        self.fake_label = target_fake_label
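        # target tensors start empty and are populated later by get_target_tensor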
        self.real_label_var = None
        self.fake_label_var = None
        self.Tensor = tensor
        self.softlabel = softlabel
        if use_lsgan:
            self.loss = nn.MSELoss()
        else:
            self.loss = nn.BCELoss()

    def get_target_tensor(self, input, target_is_real):