Example #1
 def __init__(self, config):
     self.config = config  # model configuration
     self.share_layer_A = torch.nn.Linear(config['latent_dim'],
                                          config['latent_dim'])
     self.share_layer_B = torch.nn.Linear(config['latent_dim'],
                                          config['latent_dim'])
     self.metric_layer_A = torch.nn.Linear(config['latent_dim'],
                                           config['latent_dim'])
     self.metric_layer_B = torch.nn.Linear(config['latent_dim'],
                                           config['latent_dim'])
     self.modelA = MLP(config)
     self.modelB = MLP(config)
     self.sharelayer = ShareLayer(config)
     if config['use_cuda'] is True:
         self.modelA.cuda()
         self.modelB.cuda()
         self.sharelayer.cuda()
     self.optA = use_optimizer(self.modelA, config)
     self.optB = use_optimizer(self.modelB, config)
     self.optshare = torch.optim.SGD(self.sharelayer.parameters(), lr=1e-1)
     self.optmetric_A = torch.optim.SGD(self.metric_layer_A.parameters(),
                                        lr=1e-1)
     self.optmetric_B = torch.optim.SGD(self.metric_layer_B.parameters(),
                                        lr=1e-1)
     self.crit = torch.nn.MSELoss()
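All of these constructors pull their hyperparameters out of a plain config dict. A minimal sketch of such a dict for Example #1; only 'latent_dim' and 'use_cuda' appear in the snippet itself, and the optimizer-related keys are assumptions about what use_optimizer() might read:

config = {
    'latent_dim': 8,           # width of the shared/metric Linear layers above
    'use_cuda': False,         # move modelA, modelB and sharelayer to the GPU when True
    'optimizer': 'adam',       # assumed key consumed by use_optimizer()
    'adam_lr': 1e-3,           # assumed learning-rate key
    'l2_regularization': 0.0,  # assumed weight-decay key
}
engine = SomeEngine(config)    # hypothetical name for the class owning the __init__ above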
Example #2
 def __init__(self, config):
     self.config = config  # model configuration
     self.modelA = MLP(config)
     self.modelB = MLP(config)
     if config['use_cuda'] is True:
         use_cuda(True, config['device_id'])
         self.modelA.cuda()
         self.modelB.cuda()
     print(self.modelA)
     if config['pretrain']:
         self.model.load_pretrain_weights()  # note: self.model is not defined in this snippet; presumably set elsewhere
     self.optA = use_optimizer(self.modelA, config)
     self.optB = use_optimizer(self.modelB, config)
     self.crit = torch.nn.MSELoss()
     self.alpha = config['alpha']
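Examples #1 and #2 (and most snippets below) delegate optimizer construction to a use_optimizer(model, config) helper that is not shown. A minimal sketch of what such a helper might look like; the config key names are assumptions, not taken from the snippets:

import torch

def use_optimizer(network, config):
    # hypothetical helper matching the call sites above
    if config['optimizer'] == 'sgd':
        return torch.optim.SGD(network.parameters(),
                               lr=config['sgd_lr'],
                               momentum=config.get('sgd_momentum', 0.9),
                               weight_decay=config.get('l2_regularization', 0.0))
    elif config['optimizer'] == 'adam':
        return torch.optim.Adam(network.parameters(),
                                lr=config['adam_lr'],
                                weight_decay=config.get('l2_regularization', 0.0))
    raise ValueError('unknown optimizer: {}'.format(config['optimizer']))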
Example #3
 def __init__(self):
     self._writer = SummaryWriter(log_dir=Config["normal_config"]["model_log_dir"])  # tensorboard writer
     # self._writer.add_text('config', str(config), 0)
     self.opt = use_optimizer(self.model)  # self.model is presumably set elsewhere (e.g. by a subclass) before this runs
     self.scheduler = use_scheduler(self.opt)
     self.crit = nn.BCELoss()
Example #4
 def __init__(self, config):
     self.config = config  # model configuration
     self.model = MLP(config)
     if config['use_cuda'] is True:
         self.model.cuda()
     self.opt = use_optimizer(self.model, config)
     self.crit = torch.nn.MSELoss()
Example #5
 def __init__(self, config):
     self.config = config  # model configuration
     self._metron = MetronAtK(top_k=10)
     self._writer = SummaryWriter(log_dir='runs/{}'.format(config['alias']))  # tensorboard writer
     self._writer.add_text('config', str(config), 0)
     self.opt = use_optimizer(self.model, config)
     self.crit = torch.nn.MSELoss()
Example #6
    def __init__(self, config):
        self.config = config
        self.opt = use_optimizer(self.model, config)
        self.crit = torch.nn.BCELoss()
        torch.autograd.set_detect_anomaly(True)

        if torch.cuda.is_available():
            self.crit = self.crit.cuda()
Example #7
 def __init__(self, config):
     self.config = config  # model configuration
     self._metron = MetronAtK(top_k=10)
     self.opt = use_optimizer(self.model, config)
     self.model_name = config['model']
     # explicit feedback
     # self.crit = torch.nn.MSELoss()
     # implicit feedback
     if self.model_name == 'MF':
         self.crit = torch.nn.MSELoss()
     else:
         self.crit = torch.nn.BCELoss()
Example #8
 def __init__(self, config):
     self.config = config  # model configuration
     self._metron = MetronAtK()
     self._writer = SummaryWriter(log_dir='runs/{}'.format(
         config['alias']))  # tensorboard writer
     self._writer.add_text('config', str(config), 0)
     self.opt = use_optimizer(self.model, config)
     if not config['implicit']:
         # explicit feedback
         self.crit = torch.nn.MSELoss()
     else:
         # implicit feedback
         self.crit = torch.nn.BCEWithLogitsLoss()
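Examples #7 through #10 switch the criterion with the feedback type. The practical difference between the two binary losses: torch.nn.BCELoss expects probabilities (a sigmoid already applied inside the model), while torch.nn.BCEWithLogitsLoss takes raw logits and applies the sigmoid internally, which is more numerically stable. A small self-contained check:

import torch

logits = torch.tensor([0.3, -1.2, 2.0])
target = torch.tensor([1.0, 0.0, 1.0])

# BCEWithLogitsLoss on raw logits equals BCELoss on sigmoid(logits)
loss_logits = torch.nn.BCEWithLogitsLoss()(logits, target)
loss_probs = torch.nn.BCELoss()(torch.sigmoid(logits), target)
assert torch.allclose(loss_logits, loss_probs)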
Example #9
 def __init__(self, config):
     self.config = config  # model configuration
     self._metron = MetronAtK(top_k=10)
     self._writer = SummaryWriter(log_dir='runs/{}'.format(
         config['alias']))  # tensorboard writer
     self._writer.add_text('config', str(config), 0)
     self.opt = use_optimizer(self.model, config)
     self.crit = torch.nn.BCELoss()
     self.mse = torch.nn.MSELoss()
     self.sparse = False
     if config['friend_item_matrix'].split(".")[-1] == "npz":
         self.friend_item_matrix = scipy.sparse.load_npz(
             config['friend_item_matrix'])
         self.sparse = True
     else:
         self.friend_item_matrix = np.load(config['friend_item_matrix'])
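Example #9 picks a loader from the file extension of config['friend_item_matrix']: .npz files go through scipy.sparse.load_npz, anything else through np.load. A small sketch of producing both kinds of file (the file names are made up):

import numpy as np
import scipy.sparse

mat = scipy.sparse.random(100, 50, density=0.05, format='csr')
scipy.sparse.save_npz('friend_item_matrix.npz', mat)  # read back with scipy.sparse.load_npz
np.save('friend_item_matrix.npy', mat.toarray())      # read back with np.load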
Example #10
    def __init__(self, config):
        """
        Function to initialize the engine
        :param config: configuration dictionary
        """
        self.config = config  # model configuration
        self._metron = MetronAtK(top_k=10)  # Metrics for Top-10
        self._writer = SummaryWriter(log_dir='runs/{}'.format(
            config['alias']))  # Tensorboard Writer
        self._writer.add_text('config', str(config),
                              0)  # String output for Tensorboard Writer
        self.opt = use_optimizer(self.model, config)  # set optimizer

        # self.crit = torch.nn.MSELoss() # mean squared error loss for explicit feedback
        self.crit = torch.nn.BCELoss()  # binary cross entropy loss for implicit feedback
Example #11
 def __init__(self, args):
     self.opt = utils.use_optimizer(self.model, args)
     self.device = self.set_device()
     w_pos = torch.tensor([args.pos_weight]).to(self.device)
     self.loss = nn.BCEWithLogitsLoss(pos_weight=w_pos)
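Example #11 rebalances the positive class through pos_weight. A short illustration of its effect; the value 10.0 is arbitrary (roughly appropriate when negatives outnumber positives 10:1):

import torch

crit_plain = torch.nn.BCEWithLogitsLoss()
crit_weighted = torch.nn.BCEWithLogitsLoss(pos_weight=torch.tensor([10.0]))

logits = torch.zeros(4)
target = torch.tensor([1.0, 0.0, 0.0, 0.0])

# pos_weight multiplies the loss term of positive targets (here by 10)
print(crit_plain(logits, target), crit_weighted(logits, target))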