Example #1
    def solve_binary_classification(self, iterations=100, learning_rate=0.01):
        nr_features = len(self.train_inputs[0])  # number of input features per sample
        if nr_features == 2:
            plot_classification_data(self.train_inputs, self.train_outputs, self.input_features, self.output_names)

        classifier = MyLogisticRegression(iterations, learning_rate)
        classifier.fit(self.train_inputs, self.train_outputs)
        b = classifier.get_coef()
        intercept = classifier.get_intercept()
        f = "f(x) = " + str(intercept)
        for i in range(len(b)):
            f += " + " + str(b[i]) + "*x" + str(i + 1)
        print("model: " + f)

        computed_test_outputs = classifier.predict(self.test_inputs)
        if nr_features == 2:
            plot_predictions(self.test_inputs, self.test_outputs, computed_test_outputs, self.input_features,
                             self.output_names)
        self.eval_classification(computed_test_outputs)

        # sklearn results
        classifier_sk = linear_model.LogisticRegression()
        classifier_sk.fit(self.train_inputs, self.train_outputs)
        b = classifier_sk.coef_.tolist()[0]
        intercept = classifier_sk.intercept_[0]
        f = "f(x) = " + str(intercept)
        for i in range(len(b)):
            f += " + " + str(b[i]) + "*x" + str(i + 1)
        print("model sk: " + f)
        computed_test_outputs_sk = classifier_sk.predict(self.test_inputs)
        print(sklearn.metrics.classification_report(self.test_outputs, computed_test_outputs_sk,
                                                    target_names=self.output_names))
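MyLogisticRegression itself is not shown in this example. The sketch below is a minimal guess at such a class, assuming plain batch gradient descent on the log-loss and a 0.5 decision threshold; only the constructor arguments and the fit/predict/get_coef/get_intercept names are taken from the call sites above.

import numpy as np

class MyLogisticRegression:
    def __init__(self, iterations=100, learning_rate=0.01):
        self.iterations = iterations
        self.learning_rate = learning_rate
        self.intercept_ = 0.0
        self.coef_ = None

    @staticmethod
    def _sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    def fit(self, inputs, outputs):
        x = np.asarray(inputs, dtype=float)
        y = np.asarray(outputs, dtype=float)
        self.coef_ = np.zeros(x.shape[1])
        for _ in range(self.iterations):
            # gradient of the log-loss at the current weights
            errors = self._sigmoid(x @ self.coef_ + self.intercept_) - y
            self.coef_ -= self.learning_rate * (x.T @ errors) / len(y)
            self.intercept_ -= self.learning_rate * errors.mean()

    def predict(self, inputs):
        x = np.asarray(inputs, dtype=float)
        return (self._sigmoid(x @ self.coef_ + self.intercept_) >= 0.5).astype(int)

    def get_coef(self):
        return self.coef_

    def get_intercept(self):
        return self.intercept_

Example #2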
def run_lstm(model, sequence_length, prediction_steps, data=None):
    # data may carry pre-loaded (x_train, y_train, x_test, y_test); otherwise it is loaded from disk
    global_start_time = time.time()
    epochs = 1
    ratio_of_data = 1  # ratio of data to use from 2+ million data points
    path_to_dataset = 'data/household_power_consumption.txt'

    if data is None:
        print('Loading data... ')
        x_train, y_train, x_test, y_test, result_mean = load_data(path_to_dataset, sequence_length,
                                                                  prediction_steps, ratio_of_data)
    else:
        x_train, y_train, x_test, y_test = data

    print('\nData Loaded. Compiling...\n')

    if model is None:
        model = build_model(prediction_steps)
        try:
            model.fit(x_train, y_train, batch_size=128, epochs=epochs, validation_split=0.05)
            predicted = model.predict(x_test)
            # predicted = np.reshape(predicted, (predicted.size,))
            model.save('LSTM_power_consumption_model.h5')  # save LSTM model
        except KeyboardInterrupt:  # save model if training interrupted by user
            print('Duration of training (s) : ', time.time() - global_start_time)
            model.save('LSTM_power_consumption_model.h5')
            return model, y_test, 0
    else:  # previously trained model is given
        print('Loading model...')
        predicted = model.predict(x_test)
    plot_predictions(result_mean, prediction_steps, predicted, y_test, global_start_time)

    return None
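build_model, load_data and plot_predictions come from the surrounding project and are not shown. A rough sketch of a compatible build_model, assuming a stacked Keras LSTM over a univariate series that maps each input window to prediction_steps future values (layer sizes and dropout are illustrative, not taken from the original code):

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout

def build_model(prediction_steps):
    # Two stacked LSTM layers followed by a dense head with one output per predicted step
    model = Sequential([
        LSTM(64, input_shape=(None, 1), return_sequences=True),
        Dropout(0.2),
        LSTM(64),
        Dropout(0.2),
        Dense(prediction_steps),
    ])
    model.compile(loss='mse', optimizer='rmsprop')
    return model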
Example #3
def main(args):
    x_train, y_train, x_test, y_test = generate_data(args.samples,
                                                     args.seq_len)

    model = train(x_train, y_train, args)
    y_pred = evaluate_model(model, x_train, args)

    print(y_pred)
    print(y_train)

    plot_predictions(x_train, y_train, y_pred)
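generate_data, train and evaluate_model are defined elsewhere in that project. As a hypothetical stand-in, generate_data could produce noisy sine-wave windows shaped (samples, seq_len, 1) with a single next-value target, which is all the snippet above assumes:

import numpy as np

def generate_data(samples, seq_len, train_fraction=0.8):
    # Noisy sine-wave windows; each window of length seq_len predicts the next value
    xs = np.array([np.sin(np.linspace(i, i + 3.0, seq_len + 1)) for i in range(samples)])
    xs += np.random.normal(scale=0.05, size=xs.shape)
    x, y = xs[:, :-1, None], xs[:, -1]
    split = int(train_fraction * samples)
    return x[:split], y[:split], x[split:], y[split:]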
Example #4
def plot(model):
    model = model.upper()
    if not os.path.exists(f'models/{model}.model'):
        return f'Model for {model} not found'
    graph = plot_predictions(model, 90)
    buf = BytesIO()
    graph.savefig(buf, format="png")
    data = base64.b64encode(buf.getbuffer()).decode("ascii")
    return render_template('plot.html', company=model, data=data)
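The view above returns a base64-encoded PNG into a Jinja template. A minimal sketch of the Flask wiring it would need (the route path and the plot.html markup are assumptions, not taken from the original app):

import base64
import os
from io import BytesIO

from flask import Flask, render_template

app = Flask(__name__)

@app.route('/plot/<model>')
def plot_view(model):
    return plot(model)  # delegates to the view function shown above

# plot.html could then embed the image as:
# <img src="data:image/png;base64,{{ data }}">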
Example #5
    preds = list()
    lbls = list()     # collected ground-truth block ids
    replies = list()  # decoded outputs already printed, used to skip duplicates

    for ii, il, oi in zip(input_.T, labels_.T, output):
        q = decode(sequence=ii, lookup=idx2block, separator=' ')
        l = decode(sequence=il, lookup=idx2block, separator=' ')
        o = decode(sequence=oi, lookup=idx2block, separator=' ')
        decoded = o.split(' ')

        if decoded.count('UNK') == 0:
            if decoded not in replies:
                if len(l) == len(o):
                    print('i: [{0}]\na: [{1}]\np: [{2}]\n'.format(
                        q, l, ' '.join(decoded)))
                    print("{}".format("".join(["-" for i in range(80)])))
                    lsplits = l.split()
                    osplits = o.split()
                    for lspl in lsplits:
                        match = re.match(r"(\d+)(\w)", lspl)
                        block, iotype = match.group(1), match.group(2)
                        lbls.append(block)

                    for osp in osplits:
                        match = re.match(r"(\d+)(\w)", osp)
                        block, iotype = match.group(1), match.group(2)
                        preds.append(block)
                replies.append(decoded)

    preds = np.asarray(preds, dtype=np.int64)
    lbls = np.asarray(lbls, dtype=np.int64)
    plot_predictions(lbls, preds, segment_size, vocab_freq, win_size_ms)
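decode and idx2block come from the surrounding script. A plausible sketch of decode, assuming the ids index into a lookup table of block tokens and id 0 marks padding:

def decode(sequence, lookup, separator=' '):
    # Map each non-padding id back to its token and join with the separator
    return separator.join(lookup[idx] for idx in sequence if idx != 0)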
Example #6
def run(iteration):
    """Runs an iteration: fit ESN, evolve a second ESN, plot the predictions of both networks to compare effectiveness.
    :rtype: none
    :param iteration: which iteration of program is running - iteration is used to seed the random state, ensure variety
    :return: none
    """
    # CREATE THE ECHO STATE NETWORK // ------------------------------------------------------------------------------ //
    rng = np.random.RandomState(iteration)  # Seed the random state
    esn = net.create_neural_network(1, 1, n_reservoir, spectral_radius,
                                    sparsity, noise, input_scaling, rng,
                                    True)  # Create Echo-State Network

    # OPTIMIZE THE ECHO STATE NETWORK HYPERPARAMETERS // ------------------------------------------------------------ //
    global training_window_size  # Globalize training_window_size to set it to the optimal window
    if training_window_size == 0:  # If training window isn't set yet (is 0)
        training_window_size = get_best_training_window(
            prediction_window, n_optimizer_predictions, esn, target_data,
            smallest_possible_training_window,
            largest_possible_training_window, training_window_test_increase)
        global prices  # Globalize prices to set the prices that will be predicted each iteration
        prices = target_data[training_window_size:training_window_size +
                             prediction_window]

        # Plot OHLC using plotly
        write_ohlc_html(entries_required,
                        training_window_size + prediction_window)

    # // ------------------------------------------------ (BASE ESN) ------------------------------------------------ //
    # Predictions DataFrame to hold each evolution which improved prediction accuracy
    predictions_df = pd.DataFrame(columns=(['generation', 'prediction']))

    # Fit initial network
    esn.fit(np.ones(training_window_size),
            target_data[0:training_window_size])  # Train network on data

    # Set the benchmark network results
    best_generation = -1  # -1 is no evolution at all
    best_esn = net.copy_neural_network(esn)  # Set best esn
    best_mse = net.get_fitness(esn, prices, prediction_window,
                               n_fitness_predictions)  # Set best MSE

    # Perform prediction using initial network
    prediction = esn.predict(np.ones(prediction_window))  # Predict a window
    predictions = np.ones(prediction_window)  # For holding any predictions
    predictions[0:prediction_window] = prediction[:, 0]  # Place prediction in array
    best_prediction = copy.deepcopy(predictions)  # Set best prediction

    # Print initial network information
    print("Initial MSE before evolution: ", best_mse, "\n")

    # Append the predictions DataFrame with the initial best prediction
    iteration_df = predictions_df.append(
        {
            'generation': -1,
            'prediction': best_prediction
        }, ignore_index=True)
    predictions_df = copy.deepcopy(iteration_df)

    # USE EVOLUTIONARY ALGORITHM TO TRAIN ECHO STATE NETWORK // ----------------------------------------------------- //
    # Create initial population of independent network copies
    population_of_esns = [net.copy_neural_network(esn)
                          for _ in range(n_population)]

    # Create the extreme lists to hold extreme prediction values - used for y-range calculations during plotting
    training_prediction_extremes = [min(prediction)[0], max(prediction)[0]]

    # Perform neuroevolution
    for generation in range(0, n_generations, 1):
        print("Generation: ", generation)
        lowest_mse = best_mse
        for member_id, member in enumerate(population_of_esns):
            # Evolve the weights
            weights = copy.deepcopy(member.W_out)
            weights[0] = net.mutate_weights(weights[0], mutation_rate)
            member.W_out = copy.deepcopy(weights)

            # Get fitness of the member
            mse_fitness = net.get_fitness(member, prices, prediction_window,
                                          n_fitness_predictions)

            # If this member has the best fitness so far
            if mse_fitness < lowest_mse:
                # Set the benchmark network results
                best_esn = net.copy_neural_network(
                    member)  # Stores the best member
                best_mse = mse_fitness  # And the best mse (fitness)
                # Perform prediction using this member
                prediction = member.predict(np.ones(prediction_window))
                prediction_array = np.ones(prediction_window)
                prediction_array[0:prediction_window] = prediction[:, 0]
                best_prediction = copy.deepcopy(prediction_array)
                # Store ID of this member
                best_member_id = member_id

        # Will be the case if any member improved the MSE from last generation
        if best_mse < lowest_mse:
            best_generation = generation
            # Print generation information
            print("\nBest generation: ", generation, "\nBest member: ",
                  best_member_id, "\nMSE: ", best_mse, "\nMSE before/after: ",
                  lowest_mse, "/", best_mse, "\nMSE difference: ",
                  best_mse - lowest_mse)
            # Append the predictions DataFrame with generation's best prediction
            iteration_df = predictions_df.append(
                {
                    'generation': generation,
                    'prediction': best_prediction
                },
                ignore_index=True)
            predictions_df = copy.deepcopy(iteration_df)
            # Update extremes list for y-range of plot
            if min(prediction) < training_prediction_extremes[0]:
                training_prediction_extremes[0] = min(prediction)[0]
            if max(prediction) > training_prediction_extremes[1]:
                training_prediction_extremes[1] = max(prediction)[0]

        # Perform crossover
        population_of_esns = net.perform_crossover(best_esn,
                                                   population_of_esns,
                                                   crossover_rate)

    # Plot predictions from training
    plot.plot_predictions(predictions_df, target_data,
                          training_window_size + prediction_window,
                          training_prediction_extremes, prediction_window,
                          'Training Predictions (' + str(iteration) + ').html',
                          parameters.training_html_auto_show)

    # TEST EVOLVED ECHO-STATE NETWORK ON NEW TARGET DATA // --------------------------------------------------------- //
    # Create DataFrames for prediction tests
    dtypes = np.dtype([('generation', str), ('prediction', np.float64)])
    columns_data = np.empty(0, dtype=dtypes)
    non_evolved_test_predictions_df = pd.DataFrame(columns_data)
    evolved_test_predictions_df = pd.DataFrame(columns_data)
    # Create lists to hold the test network MSE calculations
    non_evolved_mse_list = [0.0] * n_test_predictions
    evolved_mse_list = [0.0] * n_test_predictions
    # Create the extreme lists to hold extreme prediction values - used for y-range calculations during plotting
    non_evolved_prediction_extremes = [99.0, -99.0]
    evolved_prediction_extremes = [99.0, -99.0]

    # NON-EVOLVED NETWORK PREDICTIONS  // --------------------------------------------------------------------------- //
    for p in range(1, n_test_predictions, 1):
        # Perform prediction using non-evolved network
        prediction = esn.predict(np.ones(prediction_window))
        prediction_array = np.ones(prediction_window)
        prediction_array[0:prediction_window] = prediction[:, 0]
        non_evolved_mse_list[p] = net.get_mse(prices, prediction_array)
        # Append the predictions DataFrame with non-evolved network prediction
        temp_df = non_evolved_test_predictions_df.append(
            {
                'generation': str(-1),
                'prediction': prediction_array
            },
            ignore_index=True)
        non_evolved_test_predictions_df = copy.deepcopy(temp_df)
        # Update extremes list for y-range of plot
        if min(prediction) < non_evolved_prediction_extremes[0]:
            non_evolved_prediction_extremes[0] = min(prediction)[0]
        if max(prediction) > non_evolved_prediction_extremes[1]:
            non_evolved_prediction_extremes[1] = max(prediction)[0]

    # EVOLVED NETWORK PREDICTIONS // -------------------------------------------------------------------------------- //
    if best_generation != -1:
        for p in range(1, n_test_predictions, 1):
            # Perform prediction using evolved network
            neuroevolution_prediction = best_esn.predict(
                np.ones(prediction_window))
            prediction_array = np.ones(prediction_window)
            prediction_array[0:prediction_window] = neuroevolution_prediction[:, 0]
            evolved_mse_list[p] = net.get_mse(prices, prediction_array)
            # Append the predictions DataFrame with evolved network prediction
            temp_df = evolved_test_predictions_df.append(
                {
                    'generation': str(best_generation),
                    'prediction': prediction_array
                },
                ignore_index=True)
            evolved_test_predictions_df = copy.deepcopy(temp_df)
            # Update extremes list for y-range of plot
            if min(neuroevolution_prediction) < evolved_prediction_extremes[0]:
                evolved_prediction_extremes[0] = min(neuroevolution_prediction)[0]
            if max(neuroevolution_prediction) > evolved_prediction_extremes[1]:
                evolved_prediction_extremes[1] = max(neuroevolution_prediction)[0]

    # Update the prediction extremes array to pass into plotting function
    all_prediction_extremes = non_evolved_prediction_extremes
    if evolved_prediction_extremes[0] != 99.0 and evolved_prediction_extremes[1] != -99.0:
        all_prediction_extremes += evolved_prediction_extremes

    # PLOT TESTING PREDICTIONS // ----------------------------------------------------------------------------------- //
    plot.plot_test_predictions(
        non_evolved_test_predictions_df, evolved_test_predictions_df,
        target_data, all_prediction_extremes,
        training_window_size + prediction_window, prediction_window,
        'Testing Predictions (' + str(iteration) + ').html',
        parameters.testing_html_auto_show)

    # RETURN THE RESULTS OF THIS ITERATION // ----------------------------------------------------------------------- //
    return non_evolved_mse_list, evolved_mse_list
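run() returns the per-prediction test MSE lists of the non-evolved and evolved networks. A small sketch of how a driver might aggregate several iterations (n_iterations is an assumed setting, not part of the original script):

import numpy as np

n_iterations = 10  # assumed; the original script defines its own loop and parameters

non_evolved_means, evolved_means = [], []
for it in range(n_iterations):
    non_evolved_mse, evolved_mse = run(it)
    non_evolved_means.append(np.mean(non_evolved_mse))
    evolved_means.append(np.mean(evolved_mse))

print('Mean non-evolved MSE:', np.mean(non_evolved_means))
print('Mean evolved MSE:    ', np.mean(evolved_means))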