Example #1
 def __init__(self, v_size, t_size, u_size, emb_dim_v, emb_dim_t, emb_dim_u,
              emb_dim_d, hidden_dim, nb_cnt, candidate, vid_coor_nor):
     super(BiRNNT, self).__init__(v_size, emb_dim_v, hidden_dim)
     self.t_size = t_size
     self.v_size = v_size
     self.emb_dim_t = emb_dim_t
     self.emb_dim_u = emb_dim_u
     self.emb_dim_d = emb_dim_d
     self.nb_cnt = nb_cnt
     self.embedder_t = nn.Embedding(t_size, self.emb_dim_t, padding_idx=0)
     self.embedder_u = nn.Embedding(u_size, self.emb_dim_u, padding_idx=0)
     #self.embedder_lat = nn.Embedding(l_size,self.emb_dim_l,padding_idx=0)
     #self.embedder_lon = nn.Embedding(l_size,self.embd_dim_l,padding_idx=0)
     self.decoder_dim = self.hidden_dim * 2 + self.emb_dim_t + self.emb_dim_u + self.nb_cnt
     self.decoder = nn.Linear(self.decoder_dim, self.v_size)
     self.decoder2 = nn.Linear(self.nb_cnt, self.v_size)
     self.candidate = candidate
     self.vid_coor_nor = vid_coor_nor
     self.linear_d1 = nn.Linear(v_size, 1)
     self.embedder_d2 = nn.Embedding(v_size, self.emb_dim_d, padding_idx=0)
     self.att_merger = nn.Linear(2, 1)
     self.decoder_hl = IndexLinear(self.hidden_dim, v_size)
     self.decoder_hs = IndexLinear(self.hidden_dim, v_size)
     self.decoder_t = IndexLinear(self.emb_dim_t, v_size)
     self.decoder_u = IndexLinear(self.emb_dim_u, v_size)
     self.merger_weight = nn.Parameter(torch.ones(1, 5) / 5.0)
Example #2
    def __init__(self, u_size, v_size, t_size, emb_dim_u=32, emb_dim_v=32, emb_dim_t=16, hidden_dim=32, nb_cnt=15, sampling_list=None, vid_coor_nor=None, vid_pop=None, dropout=0.5, mod=0):
        super(AttentionModelNew, self).__init__()
        self.u_size = u_size
        self.v_size = v_size
        self.t_size = t_size
        self.emb_dim_u = emb_dim_u
        self.emb_dim_v = emb_dim_v
        self.emb_dim_t = emb_dim_t
        self.hidden_dim = hidden_dim
        self.nb_cnt = nb_cnt
        self.sampling_list = sampling_list
        self.vid_coor_nor = vid_coor_nor
        self.vid_pop = vid_pop
        self.dropout = dropout
        self.mod = mod

        self.tree = KDTree(list(self.vid_coor_nor.values()))
        self.embedder_u = nn.Embedding(self.u_size, self.emb_dim_u)
        self.embedder_v = nn.Embedding(self.v_size, self.emb_dim_v)
        self.embedder_t = nn.Embedding(self.t_size, self.emb_dim_t)
        self.rid_sampling_info = {}
        self.rnn_short = nn.RNNCell(self.emb_dim_v, self.hidden_dim)
        self.rnn_long = nn.GRUCell(self.emb_dim_v, self.hidden_dim)
        self.decoder_hl = IndexLinear(self.hidden_dim, v_size)
        self.decoder_hs = IndexLinear(self.hidden_dim, v_size)
        self.decoder_t = IndexLinear(self.emb_dim_t, v_size)
        self.decoder_u = IndexLinear(self.emb_dim_u, v_size)
        if self.mod == 0:
            self.merger_weight = nn.Parameter(torch.ones(1, 5) / 5.0)
        elif self.mod == 1:
            self.merger_weight = nn.Parameter(torch.ones(1, 6) / 6.0)
        elif self.mod in {2, 3}:
            # ParameterList registers the per-slot weights with the module (a plain list would not)
            self.merger_weight_al = nn.ParameterList(
                [nn.Parameter(torch.ones(1, 6) / 6.0) for _ in range(7)])
        self.att_dim = self.emb_dim_t + self.hidden_dim * 2
        self.att_M = nn.Parameter(torch.ones(self.att_dim, self.att_dim) / self.att_dim)  # TODO change back
        for i in range(self.att_dim):
            for j in range(self.att_dim):
                if i < self.hidden_dim and j < self.hidden_dim:
                    continue
                if i >= self.hidden_dim and i < self.hidden_dim * 2 and j >= self.hidden_dim and j < self.hidden_dim * 2:
                    continue
                if i >= self.hidden_dim * 2 and j >= self.hidden_dim * 2:
                    continue
                self.att_M.data[i, j] = 0.0
        self.att_merger = nn.Linear(2, 1, bias=False)
        self.att_merger.weight.data[0, 0] = 0.5
        self.att_merger.weight.data[0, 1] = -0.5
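Note: the nested loops above keep only three diagonal blocks of self.att_M (one per hidden-state segment and one for the time-embedding segment) and zero out every cross-segment entry. A minimal equivalent sketch using torch.block_diag, with the constructor defaults as sizes (this is an illustration, not code from the original repository; which hidden block corresponds to the long- or short-term state is an assumption):

import torch
import torch.nn as nn

hidden_dim, emb_dim_t = 32, 16                     # constructor defaults above
att_dim = emb_dim_t + hidden_dim * 2
mask = torch.block_diag(
    torch.ones(hidden_dim, hidden_dim),            # first recurrent-state block
    torch.ones(hidden_dim, hidden_dim),            # second recurrent-state block
    torch.ones(emb_dim_t, emb_dim_t),              # time-embedding block
)
att_M = nn.Parameter(mask / att_dim)               # same initial value as self.att_M after the loops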
Example #3
 def __init__(self, u_size, v_size, emb_dim=50, nb_cnt=100, sampling_list=None, mod=0):
     super(JNTM, self).__init__()
     self.emb_dim = emb_dim
     self.u_size = u_size
     self.v_size = v_size
     self.nb_cnt = nb_cnt
     self.sampling_list = sampling_list
     self.mod = mod
     self.rnn_cell = nn.RNNCell(emb_dim, emb_dim)
     self.gru_cell = nn.GRUCell(emb_dim, emb_dim)
     self.embedder_u = nn.Embedding(u_size, emb_dim)
     self.embedder_v = nn.Embedding(v_size, emb_dim)
     if mod == 0:
         self.decoder = IndexLinear(emb_dim * 3, v_size)
     else:
         self.decoder = IndexLinear(emb_dim * 2, v_size)
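As a rough illustration of why the decoder input size is emb_dim * 3 when mod == 0: a sketch under the assumption that the forward pass concatenates the user embedding with the long- and short-term recurrent states (the actual forward code is not shown here):

import torch

emb_dim = 50                                   # default above
u_emb   = torch.zeros(1, emb_dim)              # embedder_u output for one user
h_long  = torch.zeros(1, emb_dim)              # gru_cell hidden state
h_short = torch.zeros(1, emb_dim)              # rnn_cell hidden state
decoder_in = torch.cat([u_emb, h_long, h_short], dim=1)
print(decoder_in.size())                       # torch.Size([1, 150]) == emb_dim * 3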
Example #4
 def __init__(self, u_size, v_size, t_size, w_size, emb_dim_all=50, emb_dim_v=50, emb_dim_t=50, emb_dim_w=50,
              nb_cnt=100, sampling_list=None, glove_path=None, mod=1):
     super(SERM, self).__init__()
     self.v_size = v_size
     self.emb_dim_all = emb_dim_all
     self.emb_dim_v = emb_dim_v
     self.emb_dim_t = emb_dim_t
     self.emb_dim_w = emb_dim_w
     self.mod = mod
     self.nb_cnt = min((nb_cnt, v_size))
     self.sampling_list = sampling_list
     self.embedder_v = nn.Embedding(v_size, emb_dim_v)
     self.embedder_t = nn.Embedding(t_size, emb_dim_t)
     self.embedder_u = nn.Embedding(u_size, v_size)
     if mod == 0:
         self.embedder_w = nn.Embedding(w_size, emb_dim_w)
         self.gru_cell = nn.GRUCell(emb_dim_v + emb_dim_t + emb_dim_w, emb_dim_all)
         # read glove pre-trained embeddings
         if glove_path is not None:
             glove_file = open(glove_path, 'rt')
             for line in glove_file:
                 wid = int(line[0: line.index('\t')])
                 probs = line[line.index('\t') + 1:].rstrip('\n').split(' ')
                 for i in range(emb_dim_w):
                     self.embedder_w.weight.data[wid, i] = float(probs[i])
             glove_file.close()
     else:
         self.gru_cell = nn.GRUCell(emb_dim_v + emb_dim_t, emb_dim_all)
     self.decoder = IndexLinear(emb_dim_all, v_size)
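The GloVe-loading loop above implies a simple file layout: one word id, a tab, then emb_dim_w space-separated floats per line. A standalone sketch of the same loading step (file name and sizes are hypothetical):

import torch
import torch.nn as nn

w_size, emb_dim_w = 1000, 50                      # hypothetical vocabulary and embedding sizes
embedder_w = nn.Embedding(w_size, emb_dim_w)
with open('glove_pretrained.txt', 'rt') as f:     # hypothetical path; format: "<wid>\t<v1> <v2> ..."
    for line in f:
        wid, vec = line.rstrip('\n').split('\t', 1)
        values = [float(x) for x in vec.split(' ')[:emb_dim_w]]
        embedder_w.weight.data[int(wid)] = torch.tensor(values)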
Example #5
 def __init__(self, u_size, v_size, t_size, emb_dim_u=32, emb_dim_v=32, emb_dim_t=16, hidden_dim=32, nb_cnt=100, sampling_list=None, vid_coor_rad=None, vid_pop=None, dropout=0.5, mod=0):
     super(SpatioTemporalModel, self).__init__()
     self.emb_dim_u = emb_dim_u
     self.emb_dim_v = emb_dim_v
     self.emb_dim_t = emb_dim_t
     self.hidden_dim = hidden_dim
     self.u_size = u_size
     self.v_size = v_size
     self.t_size = t_size
     self.nb_cnt = nb_cnt
     self.dropout = dropout
     self.sampling_list = sampling_list
     self.vid_coor_rad = vid_coor_rad
     self.vid_pop = vid_pop
     self.tree = BallTree(list(vid_coor_rad.values()), leaf_size=40, metric='haversine')
     self.dist_metric = DistanceMetric.get_metric('haversine')
     self.rid_sampling_info = {}
     self.rnn_short = nn.RNNCell(self.emb_dim_v, self.hidden_dim)
     self.rnn_long = nn.GRUCell(self.emb_dim_v, self.hidden_dim)
     self.embedder_u = nn.Embedding(self.u_size, self.emb_dim_u)
     self.embedder_v = nn.Embedding(self.v_size, self.emb_dim_v)
     self.embedder_t = nn.Embedding(self.t_size, self.emb_dim_t)
     # self.embedder_t.weight = nn.Parameter(0.001 * torch.randn(self.embedder_t.weight.size()))
     dim_merged = self.hidden_dim * 2 + self.emb_dim_u + self.emb_dim_t if mod != -1 else self.hidden_dim * 2 + self.emb_dim_u
     self.decoder = IndexLinear(dim_merged, v_size)
     self.mod = mod
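The BallTree with metric='haversine' above expects each entry of vid_coor_rad to be a (lat, lon) pair in radians. A small sketch of how such a tree is typically built and queried (toy coordinates, not from the original data):

import numpy as np
from sklearn.neighbors import BallTree

vid_coor_rad = {0: (0.7130, -1.2919), 1: (0.7132, -1.2917), 2: (0.6998, -1.3050)}  # toy (lat, lon) in radians
coords = np.array(list(vid_coor_rad.values()))
tree = BallTree(coords, leaf_size=40, metric='haversine')
dist, idx = tree.query(coords[:1], k=2)        # nearest venues to venue 0, including itself
dist_km = dist * 6371.0                        # haversine distances are in radians; scale by Earth's radius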
Example #6
 def __init__(self,
              u_size,
              v_size,
              t_size,
              emb_dim=50,
              nb_cnt=100,
              sampling_list=None,
              vid_coor_rad=None,
              dropout=0.5,
              mod=0,
              mod_merge=0):
     super(TimeAwareModel, self).__init__()
     self.emb_dim = emb_dim
     self.u_size = u_size
     self.v_size = v_size
     self.t_size = t_size
     self.nb_cnt = nb_cnt
     self.dropout = dropout
     self.sampling_list = sampling_list
     self.vid_coor_rad = vid_coor_rad
     if self.vid_coor_rad is not None:
         self.tree = BallTree(list(vid_coor_rad.values()),
                              leaf_size=40,
                              metric='haversine')
         self.dist_metric = DistanceMetric.get_metric('haversine')
     self.uid_rid_nbs = {}
     for uid in range(0, u_size):
         self.uid_rid_nbs[uid] = {}
     self.mod = mod
     self.mod_merge = mod_merge
     self.rnn_short = nn.RNNCell(emb_dim, emb_dim)
     self.rnn_long = nn.GRUCell(emb_dim, emb_dim)
     self.embedder_u = nn.Embedding(u_size, emb_dim)
     self.embedder_v = nn.Embedding(v_size, emb_dim)
     if mod == 0:  #mod 0: cat(u, long, short)
         self.decoder = IndexLinear(emb_dim * 3, v_size)
     elif mod == 1:  #mod 1: cat(u, merge)
         self.rnn_merge = MergeRNNCell(emb_dim,
                                       emb_dim,
                                       mod_merge=mod_merge)
         self.decoder = IndexLinear(emb_dim * 2, v_size)
     elif mod == 2:  #mod 2: cat(u, t_next, long, short)
         self.embedder_t = nn.Embedding(t_size, emb_dim)
         self.decoder = IndexLinear(emb_dim * 4, v_size)
     elif mod == 3:  #mod 3: cat(u, t_next, merge)
         self.rnn_merge = MergeRNNCell(emb_dim,
                                       emb_dim,
                                       mod_merge=mod_merge)
         self.decoder = IndexLinear(emb_dim * 3, v_size)
     elif mod == 4:
         self.embedder_t = nn.Embedding(t_size, emb_dim)
         self.decoder = IndexLinear(emb_dim * 4, v_size)
         self.embedder_gap_time = nn.Embedding(12, 2)
         self.merger = nn.Linear(2, 1)
     elif mod == 5:
         self.embedder_t = nn.Embedding(t_size, emb_dim)
         self.decoder = IndexLinear(emb_dim * 4, v_size)
         self.embedder_gap_time = nn.Embedding(12, 2)
         self.merger = nn.Linear(2, 1)
Example #7
 def __init__(self, u_size, v_size, t_size, opt):
     super(RNNDecoder, self).__init__()
     self.u_size = u_size
     self.v_size = v_size
     self.t_size = t_size
     self.emb_dim_u = opt['emb_dim_u']
     self.emb_dim_v = opt['emb_dim_v']
     self.emb_dim_t = opt['emb_dim_t']
     self.hidden_dim = opt['hidden_dim']
     self.nb_cnt = opt['nb_cnt']
     self.rnn_short = nn.RNN(self.emb_dim_v, self.hidden_dim)
     self.rnn_long = nn.GRU(self.emb_dim_v, self.hidden_dim)
     self.embedder_u = nn.Embedding(self.u_size, self.emb_dim_u)
     self.embedder_v = nn.Embedding(self.v_size, self.emb_dim_v)
     self.embedder_t = nn.Embedding(self.t_size, self.emb_dim_t)
     dim_merged = self.hidden_dim * 2 + self.emb_dim_u + self.emb_dim_t
     self.decoder = IndexLinear(dim_merged, v_size)
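Unlike the *Cell modules used in the other examples, this decoder builds full nn.RNN / nn.GRU modules, which consume whole sequences at once. A brief sketch of the expected shapes (sizes are hypothetical stand-ins for the opt values):

import torch
import torch.nn as nn

emb_dim_v, hidden_dim = 32, 64                 # hypothetical opt['emb_dim_v'], opt['hidden_dim']
rnn_long = nn.GRU(emb_dim_v, hidden_dim)
seq = torch.zeros(10, 4, emb_dim_v)            # (seq_len, batch, emb_dim_v); batch_first defaults to False
out, h_n = rnn_long(seq)
print(out.size(), h_n.size())                  # torch.Size([10, 4, 64]) torch.Size([1, 4, 64])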
Example #8
     def __init__(self,
                 u_size,
                 v_size,
                 t_size,
                 emb_dim_u=64,
                 emb_dim_v=64,
                 emb_dim_t=32,
                 hidden_dim=64,
                 nb_cnt=100,
                 sampling_list=None,
                 vid_coor_rad=None,
                 vid_pop=None,
                 dropout=0.5):
        super(SpatioTemporalModelDistance, self).__init__()
        self.emb_dim_u = emb_dim_u
        self.emb_dim_v = emb_dim_v
        self.emb_dim_t = emb_dim_t
        self.hidden_dim = hidden_dim
        self.u_size = u_size
        self.v_size = v_size
        self.t_size = t_size
        self.nb_cnt = nb_cnt
        self.dropout = dropout
        self.sampling_list = sampling_list
        self.vid_coor_rad = vid_coor_rad
        self.vid_pop = vid_pop
        self.tree = BallTree(list(vid_coor_rad.values()),
                             leaf_size=40,
                             metric='haversine')
        self.dist_metric = DistanceMetric.get_metric('haversine')
        self.uid_rid_sampling_info = {}
        for uid in range(0, u_size):
            self.uid_rid_sampling_info[uid] = {}

        self.rnn_short = nn.RNNCell(self.emb_dim_v,
                                    self.hidden_dim)  #TODO check GRU
        self.rnn_long = nn.GRUCell(self.emb_dim_v, self.hidden_dim)
        self.embedder_u = nn.Embedding(self.u_size, self.emb_dim_u)
        self.embedder_v = nn.Embedding(self.v_size, self.emb_dim_v)
        self.embedder_t = nn.Embedding(self.t_size, self.emb_dim_t)
        dim_merged = self.hidden_dim * 2 + self.emb_dim_u + self.emb_dim_t * 2
        self.decoder = IndexLinear(dim_merged, v_size)
Example #9
    def __init__(self,
                 u_size,
                 v_size,
                 t_size,
                 emb_dim_u=32,
                 emb_dim_v=32,
                 emb_dim_t=16,
                 hidden_dim=32,
                 nb_cnt=15,
                 sampling_list=None,
                 vid_coor_nor=None,
                 vid_pop=None,
                 dropout=0.5,
                 mod=0):
        super(AttentionModel, self).__init__()
        self.u_size = u_size
        self.v_size = v_size
        self.t_size = t_size
        self.emb_dim_u = emb_dim_u
        self.emb_dim_v = emb_dim_v
        self.emb_dim_t = emb_dim_t
        self.hidden_dim = hidden_dim
        self.nb_cnt = nb_cnt
        self.sampling_list = sampling_list
        self.vid_coor_nor = vid_coor_nor
        self.vid_pop = vid_pop
        self.dropout = dropout
        self.mod = mod

        self.tree = KDTree(list(self.vid_coor_nor.values()))
        self.embedder_u = nn.Embedding(self.u_size, self.emb_dim_u)
        self.embedder_v = nn.Embedding(self.v_size, self.emb_dim_v)
        self.embedder_t = nn.Embedding(self.t_size, self.emb_dim_t)
        self.rid_sampling_info = {}
        self.rnn_short = nn.RNNCell(self.emb_dim_v, self.hidden_dim)
        self.rnn_long = nn.GRUCell(self.emb_dim_v, self.hidden_dim)
        self.decoder_h = IndexLinear(self.hidden_dim * 2, v_size)
        self.decoder_t = IndexLinear(self.emb_dim_t, v_size)
        self.decoder_u = IndexLinear(self.emb_dim_u, v_size)
        if self.mod == 0:
            self.merger_weight = nn.Parameter(torch.ones(1, 4) / 4.0)
            # self.merger = nn.Linear(4, 1, bias=False)   # u, t, h, d --> score
        elif self.mod == 1:
            self.merger_weight = nn.Parameter(torch.ones(1, 5) / 5.0)
            # self.merger = nn.Linear(5, 1, bias=False)
        elif self.mod == 2:
            self.merger_weight = nn.Parameter(torch.ones(6, 5) / 5.0)
            # self.merger_al = []
            # for _ in xrange(6):
            #     self.merger_al.append(nn.Linear(5, 1, bias=False))
        # print self.merger_weight
        self.att_dim = self.emb_dim_t + self.hidden_dim * 2
        self.att_M = nn.Parameter(
            torch.ones(self.att_dim, self.att_dim) / self.att_dim)
        for i in range(self.att_dim):
            for j in range(self.att_dim):
                if i < self.hidden_dim and j < self.hidden_dim:
                    continue
                if i >= self.hidden_dim and i < self.hidden_dim * 2 and j >= self.hidden_dim and j < self.hidden_dim * 2:
                    continue
                if i >= self.hidden_dim * 2 and j >= self.hidden_dim * 2:
                    continue
                self.att_M.data[i, j] = 0.0