Example #1
import torch.nn as nn


class CLSA(nn.Module):
    def __init__(self, net, hidden_dim, num_gc_layers, alpha=0.5, beta=1.,
                 gamma=.1, projection_size=256, projection_hidden_size=4096,
                 moving_average_decay=0.99):
        super(CLSA, self).__init__()

        self.alpha = alpha
        self.beta = beta
        self.gamma = gamma
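        # arg_parse is a project-local CLI helper; only the prior flag is read here.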
        args = arg_parse()

        self.prior = args.prior

        self.embedding_dim = hidden_dim * num_gc_layers
        self.online_encoder = net

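        # The target encoder is created lazily as an EMA copy of the online
        # encoder; target_ema_updater controls the momentum update.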
        self.target_encoder = None
        self.target_ema_updater = EMA(moving_average_decay)
        self.online_predictor = MLP(projection_size, projection_size, projection_hidden_size)

        self.init_emb()
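
For context, the constructor above leans on two project-local helpers, EMA and MLP. A minimal sketch of what they typically look like in BYOL-style models follows; the bodies are an assumption based on that common pattern, not this project's verbatim code:

import torch.nn as nn

class EMA:
    # Tracks an exponential moving average with decay beta.
    def __init__(self, beta):
        self.beta = beta

    def update_average(self, old, new):
        # On the first update there is no old value yet.
        if old is None:
            return new
        return old * self.beta + (1 - self.beta) * new

def MLP(dim, projection_size, hidden_size):
    # Two-layer projection head: dim -> hidden_size -> projection_size.
    return nn.Sequential(
        nn.Linear(dim, hidden_size),
        nn.BatchNorm1d(hidden_size),
        nn.ReLU(inplace=True),
        nn.Linear(hidden_size, projection_size),
    )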
Example #2
import os
import os.path as osp
import random

import numpy as np
import torch
import torch_geometric.transforms as T
from torch_geometric.datasets import QM9


def seed_everything(seed=1234):
    random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    np.random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
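    # cuDNN determinism: reproducible kernels at some throughput cost.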
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False


if __name__ == '__main__':
    seed_everything()
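    # Project-local model and CLI-argument definitions.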
    from model import Net
    from arguments import arg_parse
    args = arg_parse()

    target = args.target
    dim = 64
    epochs = 500
    batch_size = 20
    lamda = args.lamda
    use_unsup_loss = args.use_unsup_loss
    separate_encoder = args.separate_encoder

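    # MyTransform and Complete are project-local transforms; T.Distance(norm=False)
    # appends raw Euclidean edge lengths to edge_attr.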
    path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'QM9')
    transform = T.Compose([MyTransform(), Complete(), T.Distance(norm=False)])
    dataset = QM9(path, transform=transform).shuffle()
    print(f'num_features: {dataset.num_features}\n')

    # Normalize targets to mean = 0 and std = 1.
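    # A sketch of the normalization the comment above announces, following the
    # standard PyTorch Geometric QM9 example (assumed, not this project's
    # verbatim code):
    mean = dataset.data.y.mean(dim=0, keepdim=True)
    std = dataset.data.y.std(dim=0, keepdim=True)
    dataset.data.y = (dataset.data.y - mean) / std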