def __init__(self, opts, arch, use_bias=True, initializers=None,
             regularizers=None, custom_getter=None, name="graphnn"):
  """
  Input:
  - opts (options) - object with all relevant options stored
  - arch (ArchParams) - object with all relevant Architecture options
  - use_bias (boolean, optional) - have biases in the network (default True)
  - initializers (dict, optional) - specify custom initializers
  - regularizers (dict, optional) - specify custom regularizers
  - custom_getter (dict, optional) - specify custom getters
  - name (string, optional) - name for module for scoping (default graphnn)
  """
  super(GraphLongSkipLayerNetwork, self).__init__(custom_getter=custom_getter,
                                                  name=name)
  self._nlayers = len(arch.layer_lens)
  # Linear (embedding) layers only accept "w"/"b" regularizers, so filter out
  # any graph-layer-specific keys before passing the dict along.
  if regularizers is not None:
    lin_regularizers = {
        k: v for k, v in regularizers.items() if k in ["w", "b"]
    }
  else:
    lin_regularizers = None
  self._layers = [
      layers.GraphSkipLayer(output_size=layer_len,
                            activation=arch.activ,
                            initializers=initializers,
                            regularizers=regularizers,
                            name="{}/graph_skip".format(name))
      for layer_len in arch.layer_lens
  ] + [
      layers.EmbeddingLinearLayer(output_size=opts.final_embedding_dim,
                                  initializers=initializers,
                                  regularizers=lin_regularizers,
                                  name="{}/embed_lin".format(name))
  ]
  self._skip_layer_idx = arch.skip_layers
  self._skip_layers = [
      layers.EmbeddingLinearLayer(output_size=arch.layer_lens[skip_idx],
                                  initializers=initializers,
                                  regularizers=lin_regularizers,
                                  name="{}/skip".format(name))
      for skip_idx in self._skip_layer_idx
  ]
  self.normalize_emb = arch.normalize_emb
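# Worked example of the bookkeeping above (values are illustrative): with
# arch.layer_lens = [64, 128, 256], arch.skip_layers = [0, 2], and
# opts.final_embedding_dim = 32, the constructor builds
#   self._layers      = [GraphSkipLayer(64), GraphSkipLayer(128),
#                        GraphSkipLayer(256), EmbeddingLinearLayer(32)]
#   self._skip_layers = [EmbeddingLinearLayer(64), EmbeddingLinearLayer(256)]
# i.e. one linear skip projection per index in arch.skip_layers, with width
# arch.layer_lens[skip_idx] to match the corresponding graph layer.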
def __init__(self, opts, arch, use_bias=True, initializers=None,
             regularizers=None, custom_getter=None, name="graphnn"):
  """
  Input:
  - opts (options) - object with all relevant options stored
  - arch (ArchParams) - object with all relevant Architecture options
  - use_bias (boolean, optional) - have biases in the network (default True)
  - initializers (dict, optional) - specify custom initializers
  - regularizers (dict, optional) - specify custom regularizers
  - custom_getter (dict, optional) - specify custom getters
  - name (string, optional) - name for module for scoping (default graphnn)
  """
  super(GraphConvLayerNetwork, self).__init__(custom_getter=custom_getter,
                                              name=name)
  self._nlayers = len(arch.layer_lens)
  self._layers = [
      layers.GraphConvLayer(output_size=layer_len,
                            activation=arch.activ,
                            initializers=initializers,
                            regularizers=regularizers,
                            name="{}/graph_conv".format(name))
      for layer_len in arch.layer_lens
  ] + [
      layers.EmbeddingLinearLayer(output_size=opts.final_embedding_dim,
                                  initializers=initializers,
                                  regularizers=regularizers,
                                  name="{}/embed_lin".format(name))
  ]
  self.normalize_emb = arch.normalize_emb
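# A minimal construction sketch for the network above. The Opts/Arch
# namedtuples are hypothetical stand-ins that mirror only the attributes this
# constructor reads; the project's real option objects are defined elsewhere.
#
#   from collections import namedtuple
#   import tensorflow as tf
#
#   Opts = namedtuple("Opts", ["final_embedding_dim"])
#   Arch = namedtuple("Arch", ["layer_lens", "activ", "normalize_emb"])
#   net = GraphConvLayerNetwork(
#       opts=Opts(final_embedding_dim=64),
#       arch=Arch(layer_lens=[128, 128, 128],
#                 activ=tf.nn.relu,
#                 normalize_emb=True))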
def __init__(self, opts, arch, use_bias=True, initializers=None,
             regularizers=None, custom_getter=None, name="graphnn"):
  """
  Input:
  - opts (options) - object with all relevant options stored
  - arch (ArchParams) - object with all relevant Architecture options
  - use_bias (boolean, optional) - have biases in the network (default True)
  - initializers (dict, optional) - specify custom initializers
  - regularizers (dict, optional) - specify custom regularizers
  - custom_getter (dict, optional) - specify custom getters
  - name (string, optional) - name for module for scoping (default graphnn)
  """
  super(GraphSkipLayerNetwork, self).__init__(custom_getter=custom_getter,
                                              name=name)
  self._nlayers = len(arch.layer_lens)
  # The final linear layer only accepts "w"/"b" regularizers, so filter the
  # regularizer dict before passing it along.
  final_regularizers = None
  if regularizers is not None:
    final_regularizers = {
        k: v for k, v in regularizers.items() if k in ["w", "b"]
    }
  self._layers = [
      layers.GraphSkipLayer(output_size=layer_len,
                            activation=arch.activ,
                            initializers=initializers,
                            regularizers=regularizers,
                            name="{}/graph_skip".format(name))
      for layer_len in arch.layer_lens
  ] + [
      layers.EmbeddingLinearLayer(output_size=opts.final_embedding_dim,
                                  initializers=initializers,
                                  regularizers=final_regularizers,
                                  name="{}/embed_lin".format(name))
  ]
  self.normalize_emb = arch.normalize_emb
def __init__(self, opts, arch, use_bias=True, initializers=None,
             regularizers=None, custom_getter=None, name="graphnn"):
  """
  Input:
  - opts (options) - object with all relevant options stored
  - arch (ArchParams) - object with all relevant Architecture options
  - use_bias (boolean, optional) - have biases in the network (default True)
  - initializers (dict, optional) - specify custom initializers
  - regularizers (dict, optional) - specify custom regularizers
  - custom_getter (dict, optional) - specify custom getters
  - name (string, optional) - name for module for scoping (default graphnn)
  """
  super(GraphSkipHopNormedNetwork, self).__init__(opts, arch,
                                                  use_bias=use_bias,
                                                  initializers=initializers,
                                                  regularizers=regularizers,
                                                  custom_getter=custom_getter,
                                                  name=name)
  # Linear hop layers only accept "w"/"b" regularizers; filter out any
  # graph-layer-specific keys.
  lin_regularizers = None
  if regularizers is not None:
    lin_regularizers = {
        k: v for k, v in regularizers.items() if k in ["w", "b"]
    }
  # One hop layer per skip connection after the first; self._skip_layer_idx
  # is set by the parent class's constructor.
  self._hop_layers = [
      layers.EmbeddingLinearLayer(output_size=arch.layer_lens[skip_idx],
                                  initializers=initializers,
                                  regularizers=lin_regularizers,
                                  name="{}/hop".format(name))
      for skip_idx in self._skip_layer_idx[1:]
  ]
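# Sketch of the skip/hop bookkeeping, assuming the parent class is the
# long-skip network above (which sets self._skip_layer_idx = arch.skip_layers):
# with arch.skip_layers = [0, 2, 4], the parent builds three "{name}/skip"
# projections, while this constructor builds two "{name}/hop" projections of
# widths arch.layer_lens[2] and arch.layer_lens[4] (one per skip index after
# the first).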