Example #1
0
# nimblenet demo (setup portion): build a 2-input logical-OR dataset,
# standardize it, and declare the network settings dictionary.
from nimblenet.cost_functions import cross_entropy_cost
from nimblenet.learning_algorithms import *
from nimblenet.neuralnet import NeuralNet
from nimblenet.preprocessing import construct_preprocessor, standarize
from nimblenet.data_structures import Instance
from nimblenet.tools import print_test

# Training set
# Targets encode logical OR: only the [0, 0] input maps to 0.
dataset = [
    Instance([0, 0], [0]),
    Instance([1, 0], [1]),
    Instance([0, 1], [1]),
    Instance([1, 1], [1])
]

# Fit the standardization step on `dataset`, then apply it; the same four
# instances are reused for both training and test data.
preprocess = construct_preprocessor(dataset, [standarize])
training_data = preprocess(dataset)
test_data = preprocess(dataset)

cost_function = cross_entropy_cost
# NOTE(review): `sigmoid_function` below is not imported explicitly in this
# example — presumably it arrives via the `learning_algorithms` star import.
# Confirm, or import it from nimblenet.activation_functions as Example #3 does.
settings = {
    # Required settings
    "n_inputs": 2,  # Number of network input signals
    "layers": [(3, sigmoid_function), (1, sigmoid_function)],
    # [ (number_of_neurons, activation_function) ]
    # The last pair in the list dictate the number of output signals

    # Optional settings
    "initial_bias_value": 0.0,
    "weights_low": -0.1,  # Lower bound on the initial weight value
    "weights_high": 0.1,  # Upper bound on the initial weight value
    # NOTE(review): the settings dict (and the rest of this script) is
    # truncated at this point in the excerpt — the closing brace and any
    # NeuralNet construction/training code are not visible here.
Example #2
0
                                    accel_scaled_z)

        # Blend the integrated gyro angle with the accelerometer-derived
        # rotation (complementary-filter style). K weights the gyro path and
        # K1 the accelerometer path — presumably K + K1 == 1; confirm where
        # the constants are defined (outside this excerpt).
        last_x = K * (last_x + gyro_x_delta) + (K1 * rotation_x)
        last_y = K * (last_y + gyro_y_delta) + (K1 * rotation_y)

        # Accumulate alternating x/y samples into the window buffer:
        #[X1,Y1,X2,Y2]
        nSlice.append(last_x)
        nSlice.append(last_y)

        #When N values have been recorded
        if len(nSlice) == N:

            ##----------Predictions------------
            #Throws nSlice to Belle
            # Wrap the full window in a single unlabeled Instance.
            inData = [Instance(nSlice)]
            preprocess = construct_preprocessor(inData, [standarize])
            prediction_set = preprocess(inData)
            #Prints a prediction!
            #HIGH LOW = Bicep Curl!
            #LOW HIGH = Trash!
            #print(str(nSlice[0]) + ", " + str(nSlice[1]))
            # NOTE(review): the network is fed the raw `inData`, while the
            # standardized `prediction_set` computed just above is never
            # used — verify whether preprocessing was meant to apply here.
            neuralOut = network.predict(inData)
            print(neuralOut)
            # Order of magnitude of each network output; presumably consumed
            # later in the script (not visible in this excerpt).
            exponents = np.floor(np.log10(np.abs(neuralOut)))
            ##----------Total Movement Calculation--------
            temp_x = nSlice[0]
            temp_y = nSlice[1]
            total = 0

            # Walk the interleaved [x, y, x, y, ...] buffer; even indices are
            # x samples. (The loop body is truncated in this excerpt.)
            for i in range(len(nSlice)):
                if i % 2 == 0:
Example #3
0
from nimblenet.activation_functions import (
    sigmoid_function, tanh_function, linear_function, LReLU_function,
    ReLU_function, elliot_function, symmetric_elliot_function,
    softmax_function, softplus_function, softsign_function,
)
from nimblenet.cost_functions import (
    sum_squared_error, cross_entropy_cost, hellinger_distance, softmax_neg_loss,
)
from nimblenet.learning_algorithms import (
    backpropagation, scaled_conjugate_gradient, scipyoptimize,
    resilient_backpropagation,
)
from nimblenet.evaluation_functions import binary_accuracy
from nimblenet.neuralnet import NeuralNet
from nimblenet.preprocessing import construct_preprocessor, standarize, replace_nan, whiten
from nimblenet.data_structures import Instance
from nimblenet.tools import print_test


# XOR training set: the target is 1 exactly when the two inputs differ.
# (Generated in the same order as the hand-written list: [0,0], [1,0], [0,1], [1,1].)
dataset = [Instance([a, b], [a ^ b]) for b in (0, 1) for a in (0, 1)]

# Fit the standardization step on the dataset, then apply it; the same four
# instances double as both training and test data.
preprocess = construct_preprocessor(dataset, [standarize])
training_data = preprocess(dataset)
test_data = preprocess(dataset)


cost_function = cross_entropy_cost
settings = {
    # Required settings
    "n_inputs": 2,  # number of input signals fed into the network
    "layers": [(3, sigmoid_function), (1, sigmoid_function)],
    # each pair is (number_of_neurons, activation_function);
    # the final pair fixes the number of output signals

    # Optional settings
    "weights_low": -0.1,   # lower bound on the random initial weights
    "weights_high": 0.1,   # upper bound on the random initial weights
}