def main():
    """Train the playlist-scoring network.

    Loads the pickled training/test datasets, resumes a previously saved
    network from disk, runs RMSprop training, and saves the result.
    """
    here = os.path.dirname(os.path.realpath(__file__))
    train_path = os.path.join(here, 'training_data', 'pickle',
                              'training_playlists', 'training_4.txt')
    test_path = os.path.join(here, 'training_data', 'pickle',
                             'test_playlists', 'test_4.txt')
    training_set = produce_dataset(train_path)
    test_set = produce_dataset(test_path)

    # Topology settings, kept for reference: the network trained below is
    # resumed from disk rather than constructed from this dict.
    settings = {
        # Required settings
        "n_inputs": 8,  # Number of input signals
        "layers": [
            (8, sigmoid_function),  # First hidden layer: (nodes, activation)
            (1, sigmoid_function),  # Output layer
        ],
        # Optional settings
        "initial_bias_value": 0.0,
        "weights_low": -0.1,
        "weights_high": 0.1,
    }

    # Resume training from the previously saved model.
    network = NeuralNet.load_network_from_file("%s.pkl" % "training54point1")
    cost_function = cross_entropy_cost

    RMSprop(network, training_set, test_set, cost_function,
            ERROR_LIMIT=0.1, max_iterations=100000, batch_size=400)

    network.save_network_to_file("%s.pkl" % "training2")
# Imports grouped stdlib / third-party / local per PEP 8.
import datetime
import json
import math
import os
import random
import time

import numpy as np
import requests
import smbus
import RPi.GPIO as GPIO

from nimblenet.data_structures import Instance
# Fix: NeuralNet is used below but was never imported -> NameError.
from nimblenet.neuralnet import NeuralNet
from nimblenet.tools import print_test

##-------------------BELLE--------------------
## Loads Belle's beautiful brain (a trained network pickled on disk).
network = NeuralNet.load_network_from_file("Belle_5.pkl")

# Print a network test
#print_test( network, training_data, cost_function )

##------------------LISA--------------------
# Power management registers (presumably an MPU-6050 IMU — the 0x6b/0x6c
# registers and scale factors match its datasheet; TODO confirm).
power_mgmt_1 = 0x6b
power_mgmt_2 = 0x6c

# Divisors converting raw 16-bit sensor readings to physical units.
gyro_scale = 131.0
accel_scale = 16384.0

address = 0x68  # This is the address value read via the i2cdetect command
import sys

from Playlist import Playlist
from nimblenet.data_structures import Instance
from nimblenet.neuralnet import NeuralNet
from playlist_recommender_utilities import *

# Usage: <script> <username> <playlist>
username = sys.argv[1]
playlist = sys.argv[2]

# Build the seed playlist and its normalized aggregate feature vector.
base_playlist = Playlist.Playlist(username, playlist)
base_playlist.get_playlist()
base_playlist.generate_playlist_vector()
base_playlist.generate_normalized_aggregate_vector()

# Function to return the 100 closest songs to the playlist's
# normalized aggregate vector.
hundred_song_set = gather_hundred_songs(base_playlist)
prediction_set = create_prediction_data(hundred_song_set)

# Score the candidates with the trained network and keep the top 25.
network = NeuralNet.load_network_from_file("%s.pkl" % "training54point1")
recommended_values = network.predict(prediction_set)
returned_playlist = generate_top_twentyfive(recommended_values, hundred_song_set)

for song in returned_playlist:
    # Fix: the original used the Python-2-only `print x` statement; the
    # parenthesized single-argument form behaves identically on 2 and 3.
    print(song.attributes['title'])
def load_network():
    """Deserialize and return the NeuralNet pickled at module-level
    ``network_file``."""
    net = NeuralNet.load_network_from_file(network_file)
    return net
"n_inputs": 14, # Number of network input signals "layers": [(5, sigmoid_function), (4, sigmoid_function)], # [ (number_of_neurons, activation_function) ] # The last pair in the list dictate the number of output signals # Optional settings "weights_low": -0.1, # Lower bound on the initial weight value "weights_high": 0.4, # Upper bound on the initial weight value } # initialize the neural network network = NeuralNet(settings) network.check_gradient(training_data, cost_function) ## load a stored network configuration network = NeuralNet.load_network_from_file("redsk_00009.pkl") ctx = Context() ctx.init() # Create the user generator user = UserGenerator() user.create(ctx) #Obtener imagen depth = DepthGenerator() depth.create(ctx) depth.set_resolution_preset(DefResolution.RES_VGA) depth.fps = 30 ctx.start_generating_all()