from pybrain.datasets import SupervisedDataSet
from pybrain.structure import TanhLayer
from pybrain.supervised.trainers import RPropMinusTrainer
from pybrain.tools.shortcuts import buildNetwork
from sklearn.preprocessing import StandardScaler


def train(self, training_files, learningrate=0.01, scaling=True, noise=False, verbose=True):
    print "building dataset..."
    ds = SupervisedDataSet(SensorModel.array_length(self.sensor_ids), 1)
    # Read each log file line by line: a "Received:" line yields a SensorModel,
    # the following "Sending:" line yields the Actions taken in response.
    # Each complete (sensors, action) pair becomes one training sample.
    a = None
    s = None
    for logfile in training_files:
        print "loading file", logfile
        with open(logfile) as f:
            for line in f:
                if line.startswith("Received:"):
                    s = SensorModel(string=line.split(' ', 1)[1])
                elif line.startswith("Sending:"):
                    a = Actions.from_string(string=line.split(' ', 1)[1])
                if s is not None and a is not None:
                    ds.addSample(inp=s.get_array(self.sensor_ids),
                                 target=a[self.action_ids[0]])
                    if noise:
                        # add the same training sample again, but with noise in the sensors
                        s.add_noise()
                        ds.addSample(inp=s.get_array(self.sensor_ids),
                                     target=a[self.action_ids[0]])
                    s = None
                    a = None
    print "dataset size:", len(ds)
    if scaling:
        print "scaling dataset"
        # Center the inputs (mean only, no variance scaling) and keep the fitted
        # scaler so the same transform can be applied at prediction time.
        # Targets are left unscaled.
        self.scaler_input = StandardScaler(with_mean=True, with_std=False).fit(ds.data['input'])
        ds.data['input'] = self.scaler_input.transform(ds.data['input'])
    # learningrate applies only to the (commented-out) BackpropTrainer alternative:
    #self.trainer = BackpropTrainer(self.net, learningrate=learningrate, verbose=verbose)
    self.trainer = RPropMinusTrainer(self.net, verbose=verbose, batchlearning=True)
    print "training network..."
    self.trainer.trainUntilConvergence(dataset=ds, validationProportion=0.25,
                                       maxEpochs=10, continueEpochs=2)
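
# A minimal sketch (not part of the original file) of how the fitted scaler
# would be used at prediction time: the same mean-centering applied to the
# training inputs must be applied to live sensor input before activating the
# net. The helper name _activate is hypothetical.
def _activate(self, sensor_model):
    inp = sensor_model.get_array(self.sensor_ids)
    if self.scaler_input is not None:
        # transform expects a 2-D array; wrap and unwrap the single sample
        inp = self.scaler_input.transform([inp])[0]
    return self.net.activate(inp)  # PyBrain forward pass for one sample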
def __init__(self, sensor_ids, action_ids, n_hidden, bias=True):
    super(FFNetwork, self).__init__(sensor_ids=sensor_ids, action_ids=action_ids)
    # Feed-forward net: one input per sensor value, a single tanh hidden
    # layer of n_hidden units, and one linear output for the action.
    self.net = buildNetwork(SensorModel.array_length(sensor_ids),
                            n_hidden,
                            1,
                            hiddenclass=TanhLayer,
                            #outclass=TanhLayer,
                            bias=bias)
    self.scaler_input = None
    self.trainer = None
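
# Hypothetical usage, assuming sensor/action id lists and recorded log files
# with "Received:"/"Sending:" lines in the format parsed by train() above;
# the id names and file paths here are illustrative, not from the original.
if __name__ == "__main__":
    net = FFNetwork(sensor_ids=["angle", "speedX", "track"],  # assumed ids
                    action_ids=["steer"],                     # assumed id
                    n_hidden=20)
    net.train(["logs/run1.log", "logs/run2.log"], noise=True, verbose=True)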