def load_into_function(self, input):
    funcs = []
    for i in range(len(input.functions)):
        if i + 1 < len(self.data[0]):
            # column 0 of self.data holds the discontinuity times, column i+1
            # the values for function i; prepend 0 so the function starts at zero
            f = PiecewiseConstantFunction([x[0] for x in self.data],
                                          [0] + [x[i + 1] for x in self.data])
        else:
            f = input.functions[i]
        funcs.append(f)
    input.functions = funcs
def create_function_list(self):
    funcs = []

    # debug output: row index, row length, row contents
    for i, d in enumerate(self.data):
        print i, len(d), d

    # one piecewise-constant function per data column (column 0 holds the times)
    for i in range(len(self.data[0]) - 1):
        f = PiecewiseConstantFunction([x[0] for x in self.data],
                                      [0] + [x[i + 1] for x in self.data])
        funcs.append(f)
    return funcs
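# Usage sketch (assumption, not part of the original module): self.data is
# taken to be a list of rows [time, v1, v2, ...], which is what the indexing
# above implies, and the import path is the standard Nengo 1.4 (ca.nengo) one.
from ca.nengo.math.impl import PiecewiseConstantFunction

data = [[0.1, 0.5], [0.2, 0.8]]        # rows of [time, value]
times = [x[0] for x in data]           # discontinuities: [0.1, 0.2]
values = [0] + [x[1] for x in data]    # [0, 0.5, 0.8]; value is 0 before t=0.1
f = PiecewiseConstantFunction(times, values)
print f.map([0.05]), f.map([0.15]), f.map([0.25])   # 0.0  0.5  0.8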
def loadSequenceMatrix(self, cell):
    """Load a matrix in HRR vector format from a file and create
    corresponding output functions."""

    d = len(cell[0])

    discontinuities = [RPMutils.STEP_SIZE, 2 * RPMutils.STEP_SIZE,
                       3 * RPMutils.STEP_SIZE, 4 * RPMutils.STEP_SIZE,
                       5 * RPMutils.STEP_SIZE]

    values1 = [[0 for x in range(6)] for x in range(d)]
    values2 = [[0 for x in range(6)] for x in range(d)]

    #            1.0    2.0    3.0    4.0
    # signal A:  cell1  cell2  cell4  cell5  cell7
    # signal B:  cell2  cell3  cell5  cell6  cell8

    # values for 0th timestep
    for i in range(d):
        values1[i][0] = cell[0][i]
    for i in range(d):
        values2[i][0] = cell[1][i]

    # values for 1st timestep
    for i in range(d):
        values1[i][1] = cell[1][i]
    for i in range(d):
        values2[i][1] = cell[2][i]

    # values for 2nd timestep
    for i in range(d):
        values1[i][2] = cell[3][i]
    for i in range(d):
        values2[i][2] = cell[4][i]

    # values for 3rd timestep
    for i in range(d):
        values1[i][3] = cell[4][i]
    for i in range(d):
        values2[i][3] = cell[5][i]

    # values for 4th timestep
    for i in range(d):
        values1[i][4] = cell[6][i]
    for i in range(d):
        values2[i][4] = cell[7][i]

    for i in range(d):
        values1[i][5] = 0
        values2[i][5] = 0

    # create signal A
    f = []
    for i in range(d):
        f = f + [PiecewiseConstantFunction(discontinuities, values1[i])]
    sigA = FunctionInput("sigA", f, Units.UNK)

    # create signal B
    f = []
    for i in range(d):
        f = f + [PiecewiseConstantFunction(discontinuities, values2[i])]
    sigB = FunctionInput("sigB", f, Units.UNK)

    # create signal for adaptive learning rate
    rates = [1.0, 1.0 / 2.0, 1.0 / 3.0, 1.0 / 4.0, 1.0 / 5.0, 0.0]
    lrate = FunctionInput("lrate",
                          [PiecewiseConstantFunction(discontinuities, rates)],
                          Units.UNK)

    # create signal for second last cell
    f = []
    for i in range(d):
        f = f + [ConstantFunction(1, cell[7][i])]
    secondLast = FunctionInput("secondLast", f, Units.UNK)

    # load rule signal from file
    rulesig = []
    if RPMutils.LOAD_RULES:
        rulefile = open(RPMutils.ruleFile())
        lines = rulefile.readlines()
        rulefile.close()

        mod, rule = lines[0].split(":")
        if mod == "sequencesolver":
            rule = RPMutils.str2floatlist(rule.strip())
        else:
            rule = [0.0 for i in range(self.d)]
        rulesig = RPMutils.makeInputVectors("rulesig", [rule])

    if RPMutils.RUN_WITH_CONTROLLER:
        return ([sigA, sigB, lrate, secondLast] + rulesig)
    else:
        # create signals for answers
        ans = []
        for i in range(8):
            ans = ans + [cell[8 + i]]
        return ([sigA, sigB, lrate, secondLast] + ans + rulesig)
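# Sketch (assumption): the same FunctionInput pattern used above, shown for a
# single 2-D signal that steps from one cell vector to another at STEP_SIZE and
# back to zero afterwards. Placeholder values; import paths are the standard
# Nengo 1.4 (ca.nengo) ones.
from ca.nengo.model.impl import FunctionInput
from ca.nengo.model import Units
from ca.nengo.math.impl import PiecewiseConstantFunction

step = 0.5                               # placeholder for RPMutils.STEP_SIZE
cellA, cellB = [0.3, -0.1], [0.8, 0.2]   # placeholder 2-D cell vectors
funcs = [PiecewiseConstantFunction([step, 2 * step],
                                   [cellA[i], cellB[i], 0.0])
         for i in range(2)]
sig = FunctionInput("sigExample", funcs, Units.UNK)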
def __init__(self, actions, Qradius=1, noiselevel=0.03):
    """Builds the BGNetwork.

    :param actions: actions available to the system
    :type actions: list of tuples (action_name, action_vector)
    :param Qradius: expected radius of Q values
    :param noiselevel: standard deviation of noise added to Q values for
        exploration
    """

    self.name = "BGNetwork"
    net = nef.Network(self, seed=HRLutils.SEED, quick=False)

    self.N = 50
    self.d = len(actions)
    self.mut_inhib = 1.0  # mutual inhibition between actions
    self.tauPSC = 0.007

    # make basal ganglia
    netbg = nef.Network("bg")

    bginput = netbg.make("bginput", 1, self.d, mode="direct")
    bginput.fixMode()
    bginput.addDecodedTermination(
        "input", MU.diag([1.0 / Qradius for _ in range(self.d)]), 0.001,
        False)
    # divide by Q radius to get values back into 0 -- 1 range

    bgoutput = netbg.make("bgoutput", 1, self.d, mode="direct")
    bgoutput.fixMode()

    basalganglia.make_basal_ganglia(netbg, bginput, bgoutput,
                                    dimensions=self.d, neurons=200)
    bg = netbg.network
    net.add(bg)
    bg.fixMode([SimulationMode.DEFAULT, SimulationMode.RATE])
    bg.exposeTermination(bginput.getTermination("input"), "input")
    bg.exposeOrigin(bgoutput.getOrigin("X"), "X")

    # insert noise (used to give some randomness to drive exploration)
    noiselevel = net.make_input("noiselevel", [noiselevel])
    noise = noisenode.NoiseNode(1, dimension=len(actions))
    net.add(noise)
    net.connect(noiselevel, noise.getTermination("scale"))
    net.connect(noise.getOrigin("noise"), "bg.bginput", pstc=0.001)

    # add bias to shift everything up to 0.5--1.5
    biasinput = net.make_input("biasinput", [0.5])
    net.connect(biasinput, "bg.bginput",
                transform=[[1] for _ in range(self.d)], pstc=0.001)

    # invert BG output (so the "selected" action will have a positive value
    # and the rest zero)
    invert = thalamus.make(net, name="invert", neurons=self.N,
                           dimensions=self.d, useQuick=False)
    invert.fixMode([SimulationMode.DEFAULT, SimulationMode.RATE])
    net.connect(bg, invert.getTermination("bg_input"))

    # add mutual inhibition
    net.connect(invert.getOrigin("xBiased"), invert, pstc=self.tauPSC,
                transform=[[0 if i == j else -self.mut_inhib
                            for j in range(self.d)]
                           for i in range(self.d)])

    # threshold output values so that you get a nice clean 0 for
    # non-selected and 1 for selected
    threshf = HRLutils.node_fac()
    threshold = 0.1
    threshf.setIntercept(IndicatorPDF(threshold, 1.0))
    val_threshold = net.make_array("val_threshold", self.N * 2, self.d,
                                   node_factory=threshf, encoders=[[1]])
    val_threshold.addDecodedOrigin(
        "output",
        [PiecewiseConstantFunction([threshold], [0, 1])
         for _ in range(self.d)],
        "AXON", True)
    net.connect(invert.getOrigin("xBiased"), val_threshold,
                pstc=self.tauPSC)

    # output action (action vectors weighted by BG output)
    weight_actions = net.make_array("weight_actions", 50,
                                    len(actions[0][1]), intercept=(0, 1))
    net.connect(val_threshold.getOrigin("output"), weight_actions,
                transform=MU.transpose([actions[i][1]
                                        for i in range(self.d)]),
                pstc=0.007)

    # save the BG output (selected action and selected action value)
    save_relay = net.make("save_relay", 1, 1, mode="direct")
    save_relay.fixMode()
    save_relay.addDecodedTermination("input", [[1]], 0.001, False)

    saved_action = memory.Memory("saved_action", self.N * 2,
                                 len(actions[0][1]), inputscale=75)
    net.add(saved_action)
    net.connect(weight_actions, saved_action.getTermination("target"))
    net.connect(save_relay, saved_action.getTermination("transfer"))

    saved_vals = memory.Memory("saved_values", self.N * 2, self.d,
                               inputscale=75)
    net.add(saved_vals)
    net.connect(val_threshold.getOrigin("output"),
                saved_vals.getTermination("target"))
    net.connect(save_relay, saved_vals.getTermination("transfer"))

    # put the saved values through a threshold (we want a nice clean
    # zero for non-selected values)
    nfac = HRLutils.node_fac()
    nfac.setIntercept(IndicatorPDF(0.2, 1))
    saved_vals_threshold = net.make_array("saved_vals_threshold", self.N,
                                          self.d, node_factory=nfac,
                                          encoders=[[1]])
    saved_vals_threshold.addDecodedOrigin(
        "output",
        [PiecewiseConstantFunction([0.3], [0, 1]) for _ in range(self.d)],
        "AXON", True)
    net.connect(saved_vals, saved_vals_threshold, pstc=self.tauPSC)

    self.exposeTermination(bg.getTermination("input"), "input")
    self.exposeTermination(save_relay.getTermination("input"),
                           "save_output")
    self.exposeOrigin(val_threshold.getOrigin("output"), "curr_vals")
    self.exposeOrigin(weight_actions.getOrigin("X"), "curr_action")
    self.exposeOrigin(saved_vals_threshold.getOrigin("output"),
                      "saved_vals")
    self.exposeOrigin(saved_action.getOrigin("X"), "saved_action")
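# Construction sketch (assumption): two placeholder one-hot actions. The
# surrounding agent is expected to wire Q-value estimates into the exposed
# "input" termination and read the selection from "curr_action" / "curr_vals".
actions = [("left", [1.0, 0.0]), ("right", [0.0, 1.0])]
bg_net = BGNetwork(actions, Qradius=1.0, noiselevel=0.03)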
tau = 0.005
deltaT = 0.0001
population_size = 1600
glength = len(transMatX) + 1  # number of dimensions that represent gaussian
Imat = eye(glength - 1)
initial_input = [random() * 2 - 1 for dummy in range(glength)]
zero = zeros(glength)

###################
# Path-integrator
###################
net = nef.Network('Path_Integrator')

place_input = net.make_input('place_input', zero)
place_input.functions = [
    PiecewiseConstantFunction([0.005], [initial_input[d], zero[d]])
    for d in range(len(initial_input))]

control = net.make_input('control', [0, 0])

PI = net.make('PI', population_size, glength + 2, radius=1,
              encoders=directions, eval_points=samples, quick=True)

net.connect(place_input, PI, weight=tau * 20, pstc=tau,