Example #1
    def BuildGraphEmeddingConcatenation(
        self,
        forward_layer: Layer = None,
        backward_layer: Layer = None,
        hidden_dim: int = 100,
        kernel_init: str = 'glorot_uniform',
        act: activations = activations.softmax,
        kernel_regularizer: regularizers = regularizers.l2(0.01),
        activity_regularizer: regularizers = regularizers.l1(0.01)):
        """
        This function builds the node-embedding-to-graph-embedding sub-model and is part of the encoder structure.
            :param forward_layer:Layer: previous forward layer
            :param backward_layer:Layer: previous backward layer
            :param hidden_dim:int: hidden dimension, depends on the embedding dimension, e.g. the GloVe vector length used [Default 100]
            :param kernel_init:str: kernel initializer [Default glorot_uniform]
            :param act:activations: activation function [Default softmax]
            :param kernel_regularizer:regularizers: kernel regularizer, only used by the deprecated dense layer [Default l2(0.01)]
            :param activity_regularizer:regularizers: activity regularizer, only used by the deprecated dense layer [Default l1(0.01)]
        """
        try:
            AssertNotNone(forward_layer, 'forward_layer')
            AssertNotNone(backward_layer, 'backward_layer')
            concat = concatenate([forward_layer, backward_layer],
                                 name="fw_bw_concatenation",
                                 axis=1)
            """
            Deprecated!

            hidden = Dense( hidden_dim, 
                            kernel_initializer=kernel_init,
                            activation=act,
                            kernel_regularizer=kernel_regularizer,
                            activity_regularizer=activity_regularizer,
                            name="concatenation_act")(concat)
            """

            hidden = Dense(units=hidden_dim,
                           activation=act,
                           name='reduce_concatenation_act')(concat)

            concat_pool = None
            if (not self.input_is_2d):
                concat_pool = GlobalMaxPooling1D(
                    data_format='channels_last',
                    name='concat_max_pooling')(hidden)
            else:
                concat_pool = Lambda(
                    lambda x: K.reshape(K.max(x, axis=0), (-1, hidden_dim)),
                    name='concat_max_pool')(hidden)

            graph_embedding_state_h = concat_pool
            graph_embedding_state_c = concat_pool

            return [hidden, graph_embedding_state_h, graph_embedding_state_c]
        except Exception as ex:
            template = "An exception of type {0} occurred in [ModelBuilder.BuildGraphEmeddingConcatenation]. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
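A minimal usage sketch for the function above, assuming a ready ModelBuilder instance named builder, two encoder output tensors forward_out and backward_out, and a decoder input tensor decoder_in (all hypothetical names, not taken from the original code base); the two pooled state tensors are typically reused as the initial state of a recurrent decoder:

# Hypothetical usage sketch; builder, forward_out, backward_out and decoder_in
# are assumptions and do not come from the original code base.
from keras.layers import LSTM

hidden, state_h, state_c = builder.BuildGraphEmeddingConcatenation(
    forward_layer=forward_out,    # forward node embeddings from the encoder
    backward_layer=backward_out,  # backward node embeddings from the encoder
    hidden_dim=100)               # should match the word embedding length

# Reuse the pooled graph embedding as the initial state of a decoder LSTM.
decoder_out = LSTM(100, return_sequences=True)(
    decoder_in, initial_state=[state_h, state_c])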
Example #2
 def MakeModel(self, layers: Layer):
     """
     This function creates the model from the given inputs.
         :param layers:Layer: structure that defines your model
     """
     try:
         inputs = self.get_inputs()
         AssertNotNone(inputs, 'inputs')
         AssertNotNone(layers, 'layers')
         return Model(inputs=inputs, outputs=layers)
     except Exception as ex:
         template = "An exception of type {0} occurred in [ModelBuilder.MakeModel]. Arguments:\n{1!r}"
         message = template.format(type(ex).__name__, ex.args)
         print(message)
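A minimal sketch of how MakeModel might be called, assuming the encoder and decoder inputs were already registered on the builder and that decoder_outputs is the final Keras tensor of the network (both names are hypothetical):

# Hypothetical usage sketch; builder and decoder_outputs are assumptions.
model = builder.MakeModel(layers=decoder_outputs)
model.compile(optimizer='adam', loss='categorical_crossentropy')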
Example #3
 def get_inputs(self):
     """
     This getter returns the encoder and decoder inputs in exactly this order [encoder, decoder].
     """
     try:
         AssertNotNone(self.encoder_inputs, 'encoder inputs')
         AssertNotNone(self.decoder_inputs, 'decoder inputs')
         return [
             self.encoder_inputs[0], self.encoder_inputs[1],
             self.encoder_inputs[2], self.decoder_inputs
         ]
     except Exception as ex:
         template = "An exception of type {0} occurred in [ModelBuilder.get_inputs]. Arguments:\n{1!r}"
         message = template.format(type(ex).__name__, ex.args)
         print(message)
Example #4
    def GetAllSamplesAggregatedFeatures(self):
        """
        This function collects and aggregates the next-hop neighbourhood feature vectors for all samples.
        Attention!
            => Since the neighbourhood tensor of each sample has to be an (M x M) matrix, the feature dimension is collected from the last shape value!
        """
        try:
            samples_results = []
            dims = len(self.neighbouring.shape)
            vecs = self.neighbouring.shape[dims - 1]

            for sample in range(self.features.shape[0].value):

                sample_concatenate = None
                sample_features = self.features[sample, :, :] if (
                    dims > 2) else self.features
                sample_neighbourhood = self.neighbouring[sample, :, :] if (
                    dims > 2) else self.neighbouring

                agg_f_vecs = self.GetSamplesAggregatedFeatures(
                    features=sample_features,
                    neighbourhood=sample_neighbourhood,
                    features_size=vecs)
                sample_concatenate = K.concatenate(
                    [sample_features, agg_f_vecs])

                AssertNotNone(sample_concatenate, 'sample_concatenate')
                samples_results.append(sample_concatenate)

            assert samples_results, 'No results calculated for feature and neighbourhood samples!'
            return samples_results
        except Exception as ex:
            template = "An exception of type {0} occurred in [NeighbourhoodCollector.GetAllSamplesAggregatedFeatures]. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
Example #5
    def GetSamplesAggregatedFeatures(self, features, neighbourhood,
                                     features_size: int):
        """
        This function collects and aggregates the next-hop neighbourhood feature vectors for a single sample.
            :param features: the sample's previous or initial features
            :param neighbourhood: the sample's graph neighbourhood look-up
            :param features_size:int: number of feature vectors in the sample
        """
        try:
            agg_f_vecs = None
            for i in range(features_size):
                found_neighbour_vectors = self.GetVectorNeighbours(
                    features, neighbourhood, i)
                aggregator_result = Aggregators(found_neighbour_vectors,
                                                self.axis,
                                                self.aggregator).Execute()
                AssertNotNone(aggregator_result, 'aggregator_result')

                if (agg_f_vecs is None):
                    agg_f_vecs = [aggregator_result]
                else:
                    agg_f_vecs.append(aggregator_result)

            agg_f_vecs = agg_f_vecs[0] if (
                features_size < 2) else K.concatenate(agg_f_vecs)

            return K.transpose(K.reshape(agg_f_vecs, (features_size, -1)))
        except Exception as ex:
            template = "An exception of type {0} occurred in [NeighbourhoodCollector.GetSamplesAggregatedFeatures]. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
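Conceptually, and assuming the mean aggregator, each vertex receives an aggregate over the feature rows selected by its neighbourhood row. A rough NumPy equivalent of the inner loop (ignoring the Keras backend and the final reshape/transpose) could look like this:

# Plain NumPy sketch of the per-vertex aggregation, assuming a mean aggregator;
# an illustration only, not the original Keras backend implementation.
import numpy as np

features = np.array([[1., 2.],            # feature vector of vertex 0
                     [3., 4.],            # feature vector of vertex 1
                     [5., 6.]])           # feature vector of vertex 2
neighbourhood = np.array([[0., 1., 1.],   # vertex 0 is linked to 1 and 2
                          [1., 0., 0.],   # vertex 1 is linked to 0
                          [1., 0., 0.]])  # vertex 2 is linked to 0

# Mask the feature matrix with each neighbourhood row and aggregate the rows.
agg_f_vecs = np.stack(
    [(features * neighbourhood[i, :].reshape(-1, 1)).mean(axis=0)
     for i in range(neighbourhood.shape[-1])])
print(agg_f_vecs)  # row i holds the aggregated neighbourhood vector of vertex i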
Example #6
 def Plot(self,
          model: training.Model,
          file_name: str,
          show_shapes: bool = True):
     """
     This function plots and stores a given model to the desired image file.
         :param model:Model: keras model
         :param file_name:str: name of the image file
         :param show_shapes:bool =True: show layer shape in the graph
     """
     try:
         AssertNotNone(model, 'plotting_tensor')  # check that the plotting model is set
         AssertNotNone(file_name, 'name_plot_file')  # check that the plot file name is set
         plot_model(model, to_file=file_name, show_shapes=show_shapes)
     except Exception as ex:
         template = "An exception of type {0} occurred in [ModelBuilder.Plot]. Arguments:\n{1!r}"
         message = template.format(type(ex).__name__, ex.args)
         print(message)
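Calling the plot helper is a one-liner; note that Keras' plot_model additionally requires pydot and graphviz to be installed:

# Hypothetical usage sketch; builder and model are assumptions.
builder.Plot(model=model, file_name='graph2seq_model.png', show_shapes=True)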
Example #7
    def GetVectorNeighbours(self, features, neighbouring, index: int):
        """
        This function collects the neighbourhood vectors for a specific vertex given by index.
            :param features: previous features
            :param neighbouring: neighbourhood look-up
            :param index:int: viewed feature index
        """
        try:
            AssertNotNegative(index)
            AssertNotNone(features, 'features')
            AssertNotNone(neighbouring, 'neighbouring look-up')

            neighbouring = neighbouring[index, :]
            neighbouring = K.reshape(neighbouring, (neighbouring.shape[0], -1))
            return multiply([features, neighbouring])
        except Exception as ex:
            template = "An exception of type {0} occurred in [NeighbourhoodCollector.GetVectorNeighbours]. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
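The multiply call implements a row mask: the index-th neighbourhood row, reshaped to a column, zeroes out every feature row that does not belong to a neighbour of the viewed vertex. A small NumPy illustration of that masking (not the original Keras code):

# NumPy illustration of the masking performed by GetVectorNeighbours.
import numpy as np

features = np.array([[1., 2.],
                     [3., 4.],
                     [5., 6.]])
neighbouring = np.array([[0., 1., 1.],
                         [1., 0., 0.],
                         [1., 0., 0.]])

index = 0
mask = neighbouring[index, :].reshape(-1, 1)  # column vector of shape (3, 1)
print(features * mask)  # keeps the rows of vertices 1 and 2, zeroes out vertex 0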
Example #8
 def Summary(self, model: training.Model):
     """
     This function prints the summary of a model.
         :param model:training.Model: a built model
     """
     try:
         AssertNotNone(model, 'plotting_tensor')  # check that the model is set
         print(model.summary(line_length=200))
     except Exception as ex:
         template = "An exception of type {0} occurred in [ModelBuilder.Summary]. Arguments:\n{1!r}"
         message = template.format(type(ex).__name__, ex.args)
         print(message)
Example #9
    def NhoodLambdaLayer(self,
                         features,
                         look_up,
                         hop: int,
                         aggregator: str = 'mean',
                         layer_name: str = 'Nhood_Layer'):
        """
        This function builds a neighbourhood collecting keras lambda layer.
            :param features: 2D tensor with rows of vectorized words
            :param look_up: 2D tensor with the neighbourhood description for forward, backward or both in one
            :param hop:int: hop position in the network model structure
            :param aggregator:str: aggregator function [Default mean]
            :param layer_name:str: name of the layer (note that an extension will be added)
        """
        try:
            AssertIsKerasTensor(features)
            AssertIsKerasTensor(look_up)
            AssertNotNone(features, 'features')  # check that the features tensor is set
            AssertNotNone(look_up, 'look_up')  # check that the look-up tensor is set
            AssertNotNegative(hop)  # check that the hop index is not negative

            name = layer_name if layer_name is not None else ''
            name_ext = '_lambda_init' if hop == 0 else '_lambda_step'

            dataset = [features, look_up]
            hood_func = lambda x: Nhood(batch_sz=self.batch_size,
                                        aggregator=aggregator,
                                        is_2d=self.input_is_2d).Execute(
                                            x[0], x[1])
            return Lambda(function=hood_func, name=name + name_ext)(dataset)
        except Exception as ex:
            template = "An exception of type {0} occurred in [ModelBuilder.NhoodLambdaLayer]. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
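A sketch of how the lambda layer might be chained in the functional API, assuming word_inputs and fw_look_up are registered Keras input tensors and that every further hop reuses the previous hop's output as its feature input (hypothetical names and wiring, not taken from the original code base):

# Hypothetical usage sketch; builder, word_inputs and fw_look_up are assumptions.
hop_0 = builder.NhoodLambdaLayer(features=word_inputs,
                                 look_up=fw_look_up,
                                 hop=0,
                                 aggregator='mean',
                                 layer_name='fw')   # => layer 'fw_lambda_init'
hop_1 = builder.NhoodLambdaLayer(features=hop_0,
                                 look_up=fw_look_up,
                                 hop=1,
                                 aggregator='mean',
                                 layer_name='fw')   # => layer 'fw_lambda_step'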