Example 1
from keras.layers import Input, Dense, Dropout, LSTM
from keras.models import Model as Functional_model  # assumed alias: "Functional_model" presumably refers to keras.models.Model


def generate_model():

    if use_cal_vars:
        input_layer = Input(shape=(nb_input_neurons, 5))
    else:
        input_layer = Input(shape=(nb_input_neurons, 1))
    # input_layer = Input(shape=(1, nb_input_neurons))  # TODO: verify dimension order

    # Number of hidden layers
    nb_layers = len(hidden_neurons)
    if nb_layers > 1:
        x = LSTM(hidden_neurons[0], return_sequences=True)(input_layer)
        x = Dropout(dropout)(x)  # dropout layer to prevent overfitting
    else:
        x = LSTM(hidden_neurons[0])(input_layer)
        x = Dropout(dropout)(x)
    for i, hn in enumerate(hidden_neurons[1:], start=1):
        if i == len(hidden_neurons) - 1:
            x = LSTM(hn)(x)  # last LSTM returns a single vector
        else:
            # intermediate LSTM layers must return full sequences
            x = LSTM(hn, return_sequences=True)(x)
        x = Dropout(dropout)(x)

    # Output layer is a pdf function with all power "bins", see theory
    pdf = Dense(len(pdf_sample_points),
                activation='softmax')(x)  # previous layers (x) are stacked
    model = Functional_model(input=input_layer,
                             output=pdf)  # LSTM model definition
    return model
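
A minimal sketch of how an input batch for this model could be shaped, assuming use_cal_vars is True; the history length and sample count below are hypothetical:

import numpy as np

# LSTMs consume 3-D input of the form [samples, timesteps, features];
# with calendar variables each timestep carries 5 features.
nb_input_neurons = 24                          # hypothetical history length
X = np.random.rand(1000, nb_input_neurons, 5)  # 1000 hypothetical samples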
Example 2
    def generate_model(self):
        """
		Builds the network specified by the architecture
		"""

        if self.use_cal_vars:
            self.nb_input_neurons += 4  # minute, hour of day, day of week, month

        # print "Size of input layer:", self.nb_input_neurons
        input_layer = Input(shape=(self.nb_input_neurons, ))

        x = Dense(self.hidden_neurons[0])(input_layer)
        x = Dropout(self.dropout)(x)

        for hn in self.hidden_neurons[1:]:
            x = Dense(hn, activation=self.activation)(x)
            x = Dropout(self.dropout)(x)

        mixing_coeffs = Dense(self.nb_kernels, activation='softmax')(x)  # mixture weights, sum to 1
        centroids = Dense(self.nb_kernels, activation='linear')(x)  # kernel means
        std_devs = Dense(self.nb_kernels, activation='softplus')(x)  # strictly positive widths

        output_layer = merge([mixing_coeffs, centroids, std_devs],
                             mode='concat',
                             concat_axis=1)

        model = Functional_model(input=input_layer, output=output_layer)

        return model
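
The three Dense heads above parameterise a mixture density output, and the concatenation can be undone downstream (e.g. inside a custom loss). A minimal sketch, assuming the column layout produced by the concat above:

def split_mdn_output(y_pred, nb_kernels):
    # columns [0, k): mixing coefficients (softmax, sum to 1)
    # columns [k, 2k): centroids (linear, the kernel means)
    # columns [2k, 3k): standard deviations (softplus, strictly positive)
    mixing_coeffs = y_pred[:, :nb_kernels]
    centroids = y_pred[:, nb_kernels:2 * nb_kernels]
    std_devs = y_pred[:, 2 * nb_kernels:]
    return mixing_coeffs, centroids, std_devs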
Example 3
from keras.layers import Input, Dense, Dropout, LSTM
from keras.models import Model as Functional_model  # assumed alias for keras.models.Model
from keras.optimizers import Adam


def generate_model(dropout_rate, learning_rate, nb_hidden_layers,
                   nb_hidden_neurons):
    """
    Generate the neural network model
    Define the model's architecture and the implemented functions
    """
    # All hidden layers share the same width
    hidden_neurons = [nb_hidden_neurons] * nb_hidden_layers

    # Size of input layer
    # -------------------
    # LSTMs expect a 3-dim input of the form [samples, timesteps, features]
    if use_cal_vars:
        input_layer = Input(shape=(nb_input_neurons, 5))
    else:
        input_layer = Input(shape=(nb_input_neurons, 1))
    # input_layer = Input(shape=(1, nb_input_neurons))  # TODO: verify dimension order

    # Number of hidden layers
    nb_layers = len(hidden_neurons)
    if nb_layers > 1:
        x = LSTM(hidden_neurons[0], return_sequences=True)(input_layer)
        x = Dropout(dropout_rate)(x)  # dropout layer to prevent overfitting
    else:
        x = LSTM(hidden_neurons[0])(input_layer)
        x = Dropout(dropout_rate)(x)
    for i, hn in enumerate(hidden_neurons[1:], start=1):
        if i == len(hidden_neurons) - 1:
            x = LSTM(hn)(x)  # last LSTM returns a single vector
        else:
            # intermediate LSTM layers must return full sequences
            x = LSTM(hn, return_sequences=True)(x)
        x = Dropout(dropout_rate)(x)

    # Output layer is a pdf function with all power "bins", see theory
    pdf = Dense(len(pdf_sample_points),
                activation='softmax')(x)  # previous layers (x) are stacked
    model = Functional_model(input=input_layer,
                             output=pdf)  # LSTM model definition

    model.compile(loss=loss_func,
                  optimizer=Adam(lr=learning_rate),  # honour the requested learning rate
                  metrics=['accuracy'])

    return model
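
A hypothetical call, assuming the module-level names the function closes over (use_cal_vars, nb_input_neurons, pdf_sample_points, loss_func) are already defined:

model = generate_model(dropout_rate=0.2,
                       learning_rate=1e-3,
                       nb_hidden_layers=2,
                       nb_hidden_neurons=50)
model.summary()  # prints the layer stack and parameter counts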
Example 4
    def generate_model(self):
        """
        Generate the neural network model
        Define the model's architecture and the implemented functions
        """
        # Size of input layer
        # -------------------
        # LSTMs expect a 3-dim input of the form [samples, timesteps, features]
        if self.use_cal_vars:
            input_layer = Input(shape=(self.nb_input_neurons, 5))
        else:
            input_layer = Input(shape=(self.nb_input_neurons, 1))
        # input_layer = Input(shape=(1, self.nb_input_neurons))  # TODO: verify dimension order

        # Number of hidden layers
        nb_layers = len(self.hidden_neurons)
        if nb_layers > 1:
            x = LSTM(self.hidden_neurons[0],
                     return_sequences=True)(input_layer)
            x = Dropout(self.dropout)(x)  # dropout layer to prevent overfitting
        else:
            x = LSTM(self.hidden_neurons[0])(input_layer)
            x = Dropout(self.dropout)(x)
        for i, hn in enumerate(self.hidden_neurons[1:], start=1):
            if i == len(self.hidden_neurons) - 1:
                x = LSTM(hn)(x)  # last LSTM returns a single vector
            else:
                # intermediate LSTM layers must return full sequences
                x = LSTM(hn, return_sequences=True)(x)
            x = Dropout(self.dropout)(x)

        # Output layer is a pdf function with all power "bins", see theory
        pdf = Dense(len(self.pdf_sample_points),
                    activation='softmax')(x)  # previous layers (x) are stacked
        model = Functional_model(input=input_layer,
                                 output=pdf)  # LSTM model definition
        return model
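
Because the softmax output is a discretised pdf over the power bins, a point forecast can be recovered as its expected value. A minimal sketch, assuming X is a 3-D batch shaped as the input layer expects and pdf_sample_points is a NumPy array of bin centres:

probs = model.predict(X)                        # one pdf row per sample
point_forecasts = probs.dot(pdf_sample_points)  # expected value per sample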
Example 5
    def generate_model(self):
        """
		"""

        if self.use_cal_vars:
            self.nb_input_neurons += 4  # minute, hour of day, day of week, month

        # print "Size of input layer:", self.nb_input_neurons
        input_layer = Input(shape=(self.nb_input_neurons, ))

        x = Dense(self.hidden_neurons[0])(input_layer)
        x = Dropout(self.dropout)(x)

        for hn in self.hidden_neurons[1:]:
            x = Dense(hn, activation=self.activation)(x)
            x = Dropout(self.dropout)(x)

        pdf = Dense(len(self.pdf_sample_points), activation='softmax')(x)

        model = Functional_model(input=input_layer, output=pdf)

        return model
Example 6
x = LSTM(hidden_neurons[0])(input_layer)
x = Dropout(dropout)(x)
for i, hn in enumerate(hidden_neurons[1:], start=1):
    if i == len(hidden_neurons) - 1:
        x = LSTM(hn)(x)  # last LSTM returns a single vector
    else:
        # intermediate LSTM layers must return full sequences
        x = LSTM(hn, return_sequences=True)(x)
    x = Dropout(dropout)(x)

# Output layer is a pdf function with all power "bins", see theory
pdf = Dense(len(pdf_sample_points),
            activation='softmax')(x)  # previous layers (x) are stacked
model = Functional_model(input=input_layer,
                         output=pdf)  # LSTM model definition
"""
Generates name of the model
"""
model_name = generate_model_name()
model_directory = os.path.join(working_directory, trained_models_folder,
                               model_name + '/')
# os.mkdir(model_directory)  # mind this: directory creation is left disabled
"""
Tries to restore previously saved model weights.
"""
#init_weight
try:
    model.load_weights(os.path.join(model_directory, model_name + '.h5'))
    print(dt.datetime.now().strftime('%x %X') + ' Model ' + model_name +