Esempio n. 1
0
 def __init__(self, dim, activation=tf.nn.relu, renorm=False, **kwargs):
   """Set up the single-head attention aggregator's sub-layers.

   Args:
     dim: output dimension of the linear transform.
     activation: activation function stored for later use.
     renorm: flag stored for later use by the aggregation step.
   """
   super(SingleAttentionAggregator, self).__init__(**kwargs)
   # One linear transform plus two scalar score layers (self / neighbor).
   self.dense = layers.Dense(dim, use_bias=False)
   self.self_layer = layers.Dense(1, use_bias=False)
   self.neigh_layer = layers.Dense(1, use_bias=False)
   self.renorm = renorm
   self.activation = activation
Esempio n. 2
0
 def __init__(self,
              metapaths_of_groups,
              fanouts,
              dim,
              feature_ixs,
              feature_dims,
              aggregator='mean',
              concat=False,
              share_aggregator=False,
              *args,
              **kwargs):
     """Build one SparseSageEncoder per metapath, organized by group.

     Args:
       metapaths_of_groups: list of groups, each a list of metapaths.
       fanouts: neighbor fanouts shared by all encoders.
       dim: hidden/output dimension.
       feature_ixs: sparse feature indices passed to each encoder.
       feature_dims: sparse feature dimensions; also used to build the
         shared sparse embeddings.
       aggregator: aggregator name, e.g. 'mean'.
       concat: whether aggregators concatenate self/neighbor outputs.
       share_aggregator: if True, all encoders share one aggregator stack.
     """
     super(LasGNN, self).__init__(*args, **kwargs)
     # Sparse embeddings are always shared across all encoders.
     shared_embeddings = encoders.SparseSageEncoder.create_sparse_embeddings(
         feature_dims)
     if share_aggregator:
         shared_aggregators = encoders.SparseSageEncoder.create_aggregators(
             dim, len(fanouts), aggregator, concat=concat)
     else:
         shared_aggregators = None
     self._sparse_sage_encoders = [[
         encoders.SparseSageEncoder(metapath,
                                    fanouts,
                                    dim,
                                    feature_ixs,
                                    feature_dims,
                                    shared_embeddings=shared_embeddings,
                                    aggregator=aggregator,
                                    concat=concat,
                                    shared_aggregators=shared_aggregators)
         for metapath in metapaths_of_group
     ] for metapaths_of_group in metapaths_of_groups]
     # BUG FIX: the original iterated `metapaths_of_group`, a name local to
     # the comprehension above and therefore undefined here (NameError on
     # Python 3).  One Attention instance per metapath group is intended.
     self._attention_of_group = [Attention() for _ in metapaths_of_groups]
     self._target_feed_forward = layers.Dense(dim)
     self._context_feed_forward = layers.Dense(dim)
Esempio n. 3
0
 def __init__(self, dim, activation=tf.nn.relu, concat=False, **kwargs):
   """Mean aggregator with separate self/neighbor dense transforms.

   Args:
     dim: output dimension; each branch produces dim // 2 when concat.
     activation: activation for both dense layers.
     concat: if True, self and neighbor outputs are concatenated.

   Raises:
     ValueError: if concat is True and dim is odd (matches BaseAggregator;
       an odd dim would otherwise silently lose one output unit).
   """
   super(MeanAggregator, self).__init__(**kwargs)
   if concat:
     # Validate before halving, consistent with BaseAggregator.
     if dim % 2:
       raise ValueError('dim must be divided exactly by 2 if concat is True.')
     dim //= 2
   self.concat = concat
   self.self_layer = layers.Dense(dim, activation=activation, use_bias=False)
   self.neigh_layer = layers.Dense(dim, activation=activation, use_bias=False)
Esempio n. 4
0
    def __init__(self,
                 node_type,
                 path_patterns,
                 max_id,
                 dim,
                 sparse_feature_dims,
                 feature_ids,
                 feature_embedding_dim=16,
                 walk_len=3,
                 left_win_size=1,
                 right_win_size=1,
                 num_negs=5,
                 gamma=5,
                 *args,
                 **kwargs):
        """LsHNE multi-view heterogeneous network embedding model.

        Args:
            node_type: type of the nodes to embed.
            path_patterns: one walk pattern per view; its length defines
                the number of views.
            max_id: maximum node id.
            dim: output embedding dimension.
            sparse_feature_dims: list of sparse-feature vocabulary sizes.
            feature_ids: ids of the sparse features to fetch.
            feature_embedding_dim: per-feature embedding size.
            walk_len: random walk length.
            left_win_size: left skip-gram window size.
            right_win_size: right skip-gram window size.
            num_negs: number of negative samples.
            gamma: margin parameter stored for the loss.

        Raises:
            ValueError: if path_patterns is empty.
            TypeError: if sparse_feature_dims is not a list.
        """
        super(LsHNE, self).__init__(node_type, path_patterns, max_id, *args,
                                    **kwargs)
        self.node_type = node_type
        self.path_patterns = path_patterns
        self.max_id = max_id
        self.dim = dim
        self.walk_len = walk_len
        self.left_win_size = left_win_size
        self.right_win_size = right_win_size
        self.num_negs = num_negs
        self.view_num = len(path_patterns)
        if self.view_num < 1:
            # BUG FIX: the message said 'bigger than 1' and lacked a
            # space, but the check only requires at least one view.
            raise ValueError('View Number must be at least 1, got {}'.format(
                self.view_num))
        if not isinstance(sparse_feature_dims, list):
            # BUG FIX: message read 'dimsgot' (missing separator).
            raise TypeError(
                'Expect list for sparse feature dims, got {}.'.format(
                    type(sparse_feature_dims).__name__))
        self.sparse_feature_dims = sparse_feature_dims
        self.feature_ids = feature_ids
        self.feature_embedding_dim = feature_embedding_dim
        # Raw feature width: concatenation of all per-feature embeddings.
        self.raw_fdim = feature_embedding_dim * len(feature_ids)
        self.feature_embedding_layer = [
            layers.SparseEmbedding(d, feature_embedding_dim, combiner="sum")
            for d in sparse_feature_dims
        ]

        # BUG FIX: the original used `[{}] * self.view_num`, which repeats
        # one shared dict object, so every view ended up with the SAME
        # 'src'/'tar' layers (the last ones assigned).  Build a distinct
        # dict with distinct layers for each view.
        self.hidden_layer = [{
            'src': layers.Dense(256),
            'tar': layers.Dense(256)
        } for _ in range(self.view_num)]
        self.out_layer = [{
            'src': layers.Dense(self.dim),
            'tar': layers.Dense(self.dim)
        } for _ in range(self.view_num)]

        # One attention vector per view.
        self.att_vec = tf.get_variable(
            'att_vec',
            shape=[self.view_num, self.dim],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        self.gamma = gamma
Esempio n. 5
0
 def __init__(self, dim, activation=tf.nn.relu, concat=False, **kwargs):
   """Base aggregator with separate self/neighbor dense transforms.

   Args:
     dim: total output dimension; halved per branch when concat is True.
     activation: activation for both dense layers.
     concat: if True, self and neighbor outputs are concatenated.

   Raises:
     ValueError: if concat is True and dim is odd.
   """
   super(BaseAggregator, self).__init__(**kwargs)
   if concat and dim % 2 != 0:
     raise ValueError('dim must be divided exactly by 2 if concat is True.')
   # Each branch contributes half the output when concatenating.
   branch_dim = dim // 2 if concat else dim
   self.concat = concat
   self.self_layer = layers.Dense(branch_dim, activation=activation,
                                  use_bias=False)
   self.neigh_layer = layers.Dense(branch_dim, activation=activation,
                                   use_bias=False)
Esempio n. 6
0
 def __init__(self, node_type, edge_type, max_id, *args, **kwargs):
     """Create two relu dense layers in each direction between the
     'sim' and 'cor' embedding spaces.
     """
     super(BaseGraphSage_SE, self).__init__(node_type, edge_type, max_id,
                                            *args, **kwargs)
     dim = self.embedding_dim()

     def _relu_stack():
         # Two relu dense layers of the embedding dimension.
         return [layers.Dense(dim, activation=tf.nn.relu, use_bias=True)
                 for _ in range(2)]

     self.sim_to_cor = _relu_stack()
     self.cor_to_sim = _relu_stack()
Esempio n. 7
0
    def __init__(self,
                 metapath,
                 dim,
                 aggregator='mean',
                 feature_idx=-1,
                 feature_dim=0,
                 max_id=-1,
                 use_feature=True,
                 use_id=False,
                 use_residual=False,
                 **kwargs):
        """GCN encoder with one aggregator per metapath hop.

        Args:
            metapath: sequence of edge types; its length fixes the number
                of aggregation layers.
            dim: hidden/output dimension of every aggregator.
            aggregator: key looked up in sparse_aggregators.
            feature_idx: index of the dense feature to fetch.
            feature_dim: dimension of the dense feature.
            max_id: embedding table size when use_id is True.
            use_feature: whether node features are used.
            use_id: whether an id embedding is used.
            use_residual: when combined with use_feature, adds a dense
                projection of the raw features.
        """
        super(GCNEncoder, self).__init__(**kwargs)
        self.metapath = metapath
        self.num_layers = len(metapath)

        self.use_id = use_id
        self.use_feature = use_feature
        self.use_residual = use_residual
        if use_id:
            self.id_layer = layers.Embedding(max_id, dim)
        if use_feature and use_residual:
            self.feature_layer = layers.Dense(dim, use_bias=False)
        self.feature_idx = feature_idx
        self.feature_dim = feature_dim

        aggregator_class = sparse_aggregators.get(aggregator)
        # Every aggregator applies relu except the last, which is linear.
        self.aggregators = [
            aggregator_class(
                dim,
                activation=tf.nn.relu if i + 1 < self.num_layers else None)
            for i in range(self.num_layers)
        ]
Esempio n. 8
0
    def __init__(self,
                 label_idx,
                 label_dim,
                 num_classes=None,
                 sigmoid_loss=False,
                 **kwargs):
        """Supervised model with a final dense prediction layer.

        Args:
            label_idx: index of the label feature to fetch.
            label_dim: dimension of the label vector.
            num_classes: number of output classes; defaults to label_dim.
            sigmoid_loss: use sigmoid (multi-label) rather than softmax.

        Raises:
            ValueError: if label_dim > 1 and it differs from num_classes.
        """
        super(SupervisedModel, self).__init__()
        self.label_idx = label_idx
        self.label_dim = label_dim
        if num_classes is None:
            num_classes = label_dim
        if label_dim > 1 and label_dim != num_classes:
            # BUG FIX: the message misspelled 'label_dim' as 'laben_dim'.
            raise ValueError('label_dim must match num_classes.')
        self.num_classes = num_classes
        self.sigmoid_loss = sigmoid_loss

        self.predict_layer = layers.Dense(num_classes)
Esempio n. 9
0
 def __init__(self,
              dim,
              feature_idx=-1,
              feature_dim=0,
              max_id=-1,
              use_feature=True,
              use_id=False,
              **kwargs):
     """Shallow encoder: dense feature transform and/or id embedding.

     Args:
       dim: output embedding dimension.
       feature_idx: index of the dense feature to fetch.
       feature_dim: dimension of the dense feature.
       max_id: embedding table size when use_id is True.
       use_feature: transform fetched features with a dense layer.
       use_id: look node ids up in an embedding table.

     Raises:
       ValueError: if both use_feature and use_id are False.
     """
     super(ShallowEncoder, self).__init__(**kwargs)
     if not use_feature and not use_id:
         raise ValueError('Either use_feature or use_id must be True.')
     self.dim = dim
     # BUG FIX: was `self.use_id = use_feature`, which recorded the wrong
     # flag and could disagree with whether self.embedding exists.
     self.use_id = use_id
     self.use_feature = use_feature
     if use_id:
         # BUG FIX: arguments were swapped (`Embedding(dim, max_id)`);
         # every other constructor here builds Embedding(max_id, dim).
         self.embedding = layers.Embedding(max_id, dim)
     if use_feature:
         self.dense = layers.Dense(self.dim)
     self.feature_idx = feature_idx
     self.feature_dim = feature_dim
Esempio n. 10
0
    def __init__(self,
                 dim=None,
                 feature_idx=-1,
                 feature_dim=0,
                 max_id=-1,
                 sparse_feature_idx=-1,
                 sparse_feature_max_id=-1,
                 embedding_dim=16,
                 use_hash_embedding=False,
                 combiner='concat',
                 **kwargs):
        """Shallow encoder combining dense features, an optional id
        embedding and per-sparse-feature embeddings.

        Scalar arguments are promoted to lists so each feature/embedding
        can be configured independently.  Which inputs are used is
        inferred from the sentinel defaults: -1 means "not used".

        Args:
            dim: output dimension of the final dense layer; required when
                combiner is 'add', optional for 'concat'.
            feature_idx: dense feature index, or list of indices.
            feature_dim: dense feature dimension(s), matching feature_idx.
            max_id: max node id; -1 disables the id embedding.
            sparse_feature_idx: sparse feature index, or list of indices.
            sparse_feature_max_id: max id per sparse feature.
            embedding_dim: int or per-embedding list of embedding sizes
                (id embedding first, then one per sparse feature).
            use_hash_embedding: bool or per-embedding list selecting the
                hash-based embedding variants.
            combiner: 'add' or 'concat' — presumably how embeddings are
                combined in the forward pass (not visible here).

        Raises:
            ValueError: on an invalid combiner, missing dim for 'add', or
                mismatched list lengths.
        """
        super(ShallowEncoder, self).__init__(**kwargs)

        if combiner not in ['add', 'concat']:
            raise ValueError('combiner must be \'add\' or \'concat\'.')
        # 'add' needs a common size: embedding_dim is forced to dim below.
        if combiner == 'add' and dim is None:
            raise ValueError('add must be used with dim provided.')

        # -1 sentinel defaults mean the corresponding input is unused.
        use_feature = feature_idx != -1
        use_id = max_id != -1
        use_sparse_feature = sparse_feature_idx != -1

        # Promote scalar feature specs to singleton lists.
        if isinstance(feature_idx, int) and use_feature:
            feature_idx = [feature_idx]
        if isinstance(feature_dim, int) and use_feature:
            feature_dim = [feature_dim]
        if use_feature and len(feature_idx) != len(feature_dim):
            raise ValueError(
                'feature_dim must be the same length as feature_idx.')

        if isinstance(sparse_feature_idx, int) and use_sparse_feature:
            sparse_feature_idx = [sparse_feature_idx]
        if isinstance(sparse_feature_max_id, int) and use_sparse_feature:
            sparse_feature_max_id = [sparse_feature_max_id]
        if use_sparse_feature and \
           len(sparse_feature_idx) != len(sparse_feature_max_id):
            raise ValueError(
                'sparse_feature_idx must be the same length as'
                'sparse_feature_max_id.', len(sparse_feature_idx),
                len(sparse_feature_max_id), sparse_feature_idx,
                sparse_feature_max_id)

        # One embedding for the id (if used) plus one per sparse feature.
        embedding_num = (1 if use_id else 0) + \
                        (len(sparse_feature_idx) if use_sparse_feature else 0)

        if combiner == 'add':
            embedding_dim = dim
        # Promote a scalar embedding_dim to one entry per embedding.
        if isinstance(embedding_dim, int) and embedding_num:
            embedding_dim = [embedding_dim] * embedding_num
        if embedding_num and len(embedding_dim) != embedding_num:
            raise ValueError('length of embedding_num must be int(use_id) + '
                             'len(sparse_feature_idx)')

        if isinstance(use_hash_embedding, bool) and embedding_num:
            use_hash_embedding = [use_hash_embedding] * embedding_num
        if embedding_num and len(use_hash_embedding) != embedding_num:
            raise ValueError(
                'length of use_hash_embedding must be int(use_id) + '
                'len(sparse_feature_idx)')

        # model architecture
        self.dim = dim
        self.use_id = use_id
        self.use_feature = use_feature
        self.use_sparse_feature = use_sparse_feature
        self.combiner = combiner

        # feature fetching parameters
        self.feature_idx = feature_idx
        self.feature_dim = feature_dim
        self.sparse_feature_idx = sparse_feature_idx
        self.sparse_feature_max_id = sparse_feature_max_id
        self.embedding_dim = embedding_dim

        # sub-layers
        if dim:
            self.dense = layers.Dense(self.dim, use_bias=False)

        if use_id:
            # The id embedding consumes the FIRST entry of the per-embedding
            # lists; the remainder is left for the sparse features below.
            embedding_class = \
                layers.HashEmbedding if use_hash_embedding[0] else layers.Embedding
            self.embedding = embedding_class(max_id + 1, embedding_dim[0])
            embedding_dim = embedding_dim[1:]
            use_hash_embedding = use_hash_embedding[1:]
        if use_sparse_feature:
            self.sparse_embeddings = []
            # NOTE(review): `max_id` and `dim` shadow the constructor
            # arguments from here on; the originals are no longer needed
            # at this point.
            for max_id, dim, use_hash in zip(sparse_feature_max_id,
                                             embedding_dim,
                                             use_hash_embedding):
                sparse_embedding_class = \
                    layers.HashSparseEmbedding if use_hash else layers.SparseEmbedding
                self.sparse_embeddings.append(
                    sparse_embedding_class(max_id + 1, dim))
Esempio n. 11
0
 def __init__(self, dim, activation=tf.nn.relu, renorm=False, **kwargs):
   """GCN aggregator: a single shared dense transform.

   Args:
     dim: output dimension of the dense layer.
     activation: activation applied by the dense layer.
     renorm: flag stored for later use by the aggregation step.
   """
   super(GCNAggregator, self).__init__(**kwargs)
   self.dense = layers.Dense(dim, activation=activation, use_bias=False)
   self.renorm = renorm
Esempio n. 12
0
 def __init__(self, dim, *args, **kwargs):
   """Pooling aggregator: starts with one relu dense pre-pooling layer."""
   super(BasePoolAggregator, self).__init__(dim, *args, **kwargs)
   pool_layer = layers.Dense(dim, activation=tf.nn.relu)
   self.layers = [pool_layer]