def _init_random_sampling_net(self):
    """Build a training network that samples random receptive fields.

    Mirrors the structure of ``self._default_net`` but replaces each
    rectangular switchboard with a ``RandomChannelSwitchboard`` that draws
    ``self.n_training_fields[i]`` random fields per layer, while reusing
    (sharing) the trainable node of the corresponding default-net layer.

    Returns a list of ``mdp.hinet.FlowNode`` objects, one per layer.

    NOTE(review): a second definition of this method appears later in this
    file (with ``flow[2]`` indexing and a ``dtype`` argument); in Python the
    later definition wins, so this one is shadowed — confirm which is intended.
    """
    # The shared trainable nodes come from the default net, so it must exist.
    if not hasattr(self, '_default_net'):
        raise mdp.NodeException("'_init_random_sampling_net' must be called after '_init_default_net'")
    # (-1, -1) is a sentinel meaning "use the full input as one field".
    if self.field_channels_xy[0] == (-1, -1):
        sb = mdp.hinet.RandomChannelSwitchboard(self.in_channels_xy, self.in_channels_xy,
                                                in_channel_dim=self.in_channel_dim,
                                                out_channels=1,
                                                field_dstr=self.field_dstr)
    else:
        sb = mdp.hinet.RandomChannelSwitchboard(self.in_channels_xy, self.field_channels_xy[0],
                                                in_channel_dim=self.in_channel_dim,
                                                out_channels=self.n_training_fields[0],
                                                field_dstr=self.field_dstr)
    # Clone the *same* trainable node as the default net's first layer
    # (flow[1] is the CloneLayer; .node is the shared node).
    cl = mdp.hinet.CloneLayer(self._default_net[0].flow[1].node, n_nodes=sb.output_channels)
    node = mdp.hinet.FlowNode(mdp.Flow([sb, cl]))
    layers = [node]
    for i in xrange(1, self.n_layers):
        # Input geometry for layer i is taken from the default net's
        # layer i-1 switchboard output (flow[0] is the switchboard here;
        # NOTE(review): the later duplicate uses flow[2], the pool — verify).
        if self.field_channels_xy[i] == (-1, -1):
            sb = mdp.hinet.RandomChannelSwitchboard(self._default_net[i - 1].flow[0].out_channels_xy,
                                                    self._default_net[i - 1].flow[0].out_channels_xy,
                                                    in_channel_dim=self._default_net[i - 1].flow[1].node.output_dim,
                                                    out_channels=1,
                                                    field_dstr=self.field_dstr)
        else:
            sb = mdp.hinet.RandomChannelSwitchboard(self._default_net[i - 1].flow[0].out_channels_xy,
                                                    self.field_channels_xy[i],
                                                    in_channel_dim=self._default_net[i - 1].flow[1].node.output_dim,
                                                    out_channels=self.n_training_fields[i],
                                                    field_dstr=self.field_dstr)
        cl = mdp.hinet.CloneLayer(self._default_net[i].flow[1].node, n_nodes=sb.output_channels)
        node = mdp.hinet.FlowNode(mdp.Flow([sb, cl]))
        layers.append(node)
    return layers
def _init_default_net(self):
    """Build the default hierarchical network, one FlowNode per layer.

    Each layer is switchboard -> CloneLayer(shared trainable node) -> Pool2D.
    Layer 0 reads the raw input geometry; layer i>0 reads the previous
    layer's pooled output geometry.

    Returns a 4-tuple:
        layers               -- list of ``mdp.hinet.FlowNode``, one per layer
        rec_n_training_fields -- number of switchboard output channels per layer
        input_shape          -- (x, y, channel_dim) of the raw input
        output_shape         -- (x, y, feature_dim) after the last pool
    """
    # (-1, -1) is a sentinel: treat the whole input as a single field.
    if self.field_channels_xy[0] == (-1, -1):
        sb = mdp.hinet.Rectangular2dSwitchboard(self.in_channels_xy, self.in_channels_xy,
                                                (1, 1), self.in_channel_dim, True)
    else:
        sb = mdp.hinet.Rectangular2dSwitchboard(self.in_channels_xy, self.field_channels_xy[0],
                                                self.field_spacing_xy[0], self.in_channel_dim, True)
    # One trainable node, cloned over every receptive field.
    ln = self._hinet_node(sb.out_channel_dim, self.n_features[0])
    cl = mdp.hinet.CloneLayer(ln, n_nodes=sb.output_channels)
    rec_n_training_fields = [sb.output_channels]
    pool = mdp.hinet.Pool2D(in_channels_xy=sb.out_channels_xy,
                            field_channels_xy=self.pool_channels_xy[0],
                            field_spacing_xy=self.pool_spacing_xy[0],
                            in_channel_dim=ln.output_dim,
                            mode=self.pool_mode)
    node = mdp.hinet.FlowNode(mdp.Flow([sb, cl, pool]), dtype=self.dtype)
    layers = [node]
    for i in xrange(1, self.n_layers):
        # Each subsequent layer consumes the previous pool's output grid.
        if self.field_channels_xy[i] == (-1, -1):
            sb = mdp.hinet.Rectangular2dSwitchboard(pool.out_channels_xy, pool.out_channels_xy,
                                                    (1, 1), ln.output_dim, True)
        else:
            sb = mdp.hinet.Rectangular2dSwitchboard(pool.out_channels_xy, self.field_channels_xy[i],
                                                    self.field_spacing_xy[i], ln.output_dim, True)
        ln = self._hinet_node(sb.out_channel_dim, self.n_features[i])
        cl = mdp.hinet.CloneLayer(ln, n_nodes=sb.output_channels)
        rec_n_training_fields.append(sb.output_channels)
        pool = mdp.hinet.Pool2D(in_channels_xy=sb.out_channels_xy,
                                field_channels_xy=self.pool_channels_xy[i],
                                field_spacing_xy=self.pool_spacing_xy[i],
                                in_channel_dim=ln.output_dim,
                                mode=self.pool_mode)
        node = mdp.hinet.FlowNode(mdp.Flow([sb, cl, pool]), dtype=self.dtype)
        layers.append(node)
    input_shape = (self.in_channels_xy[0], self.in_channels_xy[1], self.in_channel_dim)
    output_shape = (pool.out_channels_xy[0], pool.out_channels_xy[1], ln.output_dim)
    return layers, rec_n_training_fields, input_shape, output_shape
def _init_random_sampling_net(self):
    """Build a random-field training network mirroring ``self._default_net``.

    For each layer a ``RandomChannelSwitchboard`` draws
    ``self.n_training_fields[i]`` random receptive fields (or a single
    full-input field when the sentinel ``(-1, -1)`` is configured) and the
    trainable node of the matching default-net layer is *shared* via
    ``CloneLayer``, so training this net trains the default net's nodes.

    Returns a list of ``mdp.hinet.FlowNode`` objects, one per layer.
    """
    if self.field_channels_xy[0] == (-1, -1):
        sb = mdp.hinet.RandomChannelSwitchboard(self.in_channels_xy, self.in_channels_xy,
                                                in_channel_dim=self.in_channel_dim,
                                                out_channels=1,
                                                field_dstr=self.field_dstr)
    else:
        sb = mdp.hinet.RandomChannelSwitchboard(self.in_channels_xy, self.field_channels_xy[0],
                                                in_channel_dim=self.in_channel_dim,
                                                out_channels=self.n_training_fields[0],
                                                field_dstr=self.field_dstr)
    # flow[1] of a default-net layer is its CloneLayer; .node is the shared node.
    cl = mdp.hinet.CloneLayer(self._default_net[0].flow[1].node, n_nodes=sb.output_channels)
    node = mdp.hinet.FlowNode(mdp.Flow([sb, cl]), dtype=self.dtype)
    layers = [node]
    for i in xrange(1, self.n_layers):
        # flow[2] of the previous default-net layer is its Pool2D; its output
        # grid defines this layer's input geometry.
        if self.field_channels_xy[i] == (-1, -1):
            sb = mdp.hinet.RandomChannelSwitchboard(self._default_net[i - 1].flow[2].out_channels_xy,
                                                    self._default_net[i - 1].flow[2].out_channels_xy,
                                                    in_channel_dim=self._default_net[i - 1].flow[1].node.output_dim,
                                                    out_channels=1,
                                                    field_dstr=self.field_dstr)
        else:
            sb = mdp.hinet.RandomChannelSwitchboard(self._default_net[i - 1].flow[2].out_channels_xy,
                                                    self.field_channels_xy[i],
                                                    in_channel_dim=self._default_net[i - 1].flow[1].node.output_dim,
                                                    out_channels=self.n_training_fields[i],
                                                    field_dstr=self.field_dstr)
        cl = mdp.hinet.CloneLayer(self._default_net[i].flow[1].node, n_nodes=sb.output_channels)
        node = mdp.hinet.FlowNode(mdp.Flow([sb, cl]), dtype=self.dtype)
        layers.append(node)
    return layers
def hierachynet(bo, recf, ovl):
    """Build a three-stage hierarchical SFA network.

    Structure: Rectangular2dSwitchboard -> cloned (SFA, quadratic expansion,
    SFA) units -> second switchboard -> cloned upper units -> a single top
    (SFA, expansion, SFA) FlowNode.

    bo   -- input grid size (bo x bo channels)
    recf -- receptive-field edge length of the first switchboard
    ovl  -- field spacing (stride) of the first switchboard

    Returns the untrained ``mdp.Flow``.
    """
    # --- bottom layer ---
    switchboard = mdp.hinet.Rectangular2dSwitchboard(in_channels_xy=(bo, bo),
                                                     field_channels_xy=(recf, recf),
                                                     field_spacing_xy=(ovl, ovl))
    sfa_dim = 48
    sfa_lower_out = 32  # 64
    sfanode = mdp.nodes.SFANode(input_dim=switchboard.out_channel_dim, output_dim=sfa_dim)
    sfa2node = mdp.nodes.QuadraticExpansionNode(input_dim=sfa_dim)
    # noi_node = mdp.nodes.NoiseNode(input_dim=sfa2node.output_dim, noise_args=(0, sqrt(0.0005)))  # test
    sfanode2 = mdp.nodes.SFANode(input_dim=sfa2node.output_dim, output_dim=sfa_lower_out)
    flownode = mdp.hinet.FlowNode(mdp.Flow([sfanode, sfa2node, sfanode2]))
    # flownode = mdp.hinet.FlowNode(mdp.Flow([sfanode, sfa2node, noi_node, sfanode2]))  # test
    sfalayer = mdp.hinet.CloneLayer(flownode, n_nodes=switchboard.output_channels)
    flow = mdp.Flow([switchboard, sfalayer])  # NOTE(review): unused — possibly a dimension sanity check
    # --- middle layer ---
    sfa_upper_bo = 6
    sfa_upper_recf = 4
    sfa_upper_out = 32
    sfa_top_out = 10
    ovl2 = 2
    switchboard2 = mdp.hinet.Rectangular2dSwitchboard(in_channels_xy=(sfa_upper_bo, sfa_upper_bo),
                                                      field_channels_xy=(sfa_upper_recf, sfa_upper_recf),
                                                      field_spacing_xy=(ovl2, ovl2),
                                                      # new adding
                                                      in_channel_dim=sfa_lower_out)
    sfa_uppernode = mdp.nodes.SFANode(input_dim=switchboard2.out_channel_dim, output_dim=sfa_dim)
    sfa_upperexp = mdp.nodes.QuadraticExpansionNode(input_dim=sfa_dim)
    # noi_node = mdp.nodes.NoiseNode(input_dim=sfa_upperexp.output_dim, noise_args=(0, sqrt(0.0005)))  # test
    sfa_uppernode2 = mdp.nodes.SFANode(input_dim=sfa_upperexp.output_dim, output_dim=sfa_upper_out)
    upper_flownode = mdp.hinet.FlowNode(mdp.Flow([sfa_uppernode, sfa_upperexp, sfa_uppernode2]))
    # upper_flownode = mdp.hinet.FlowNode(mdp.Flow([sfa_uppernode, sfa_upperexp, noi_node, sfa_uppernode2]))  # test
    upper_sfalayer = mdp.hinet.CloneLayer(upper_flownode, n_nodes=switchboard2.output_channels)
    # --- top layer ---
    # sfa_top_node = mdp.nodes.SFANode(input_dim=switchboard2.out_channel_dim, output_dim=sfa_top_out)  # mistake
    sfa_top_node = mdp.nodes.SFANode(input_dim=upper_sfalayer.output_dim, output_dim=sfa_dim)
    sfa_topexp = mdp.nodes.QuadraticExpansionNode(input_dim=sfa_dim)
    # noi_node = mdp.nodes.NoiseNode(input_dim=sfa_topexp.output_dim, noise_args=(0, sqrt(0.0005)))  # test
    sfa_topnode2 = mdp.nodes.SFANode(input_dim=sfa_topexp.output_dim, output_dim=sfa_top_out)
    sfa_over_node = mdp.hinet.FlowNode(mdp.Flow([sfa_top_node, sfa_topexp, sfa_topnode2]))
    # sfa_over_node = mdp.hinet.FlowNode(mdp.Flow([sfa_top_node, sfa_topexp, noi_node, sfa_topnode2]))  # test
    network = mdp.Flow([switchboard, sfalayer, switchboard2, upper_sfalayer, sfa_over_node])
    return network
def get_hinet(hinet_config):
    """Return all the network information for a given parameter set.

    hinet_config -- Dict containing all the configuration info:
        'image_size': Size of the input image.
        'layer_configs': List of parameter dictionaries.

    Returns a dict with the rendered HTML view, per-layer coverage SVGs,
    their element id lists, and the JSON-serialized configuration.
    Side effect: rebinds the module-level ``flow`` global to the new Flow.
    """
    global flow
    layers = []
    prev_layer = None
    image_size = hinet_config["image_size"]
    for layer_config in hinet_config["layer_configs"]:
        if not prev_layer:
            # create fake layer, corresponding to image
            prev_layer = mdp.hinet.FlowNode(
                mdp.Flow([get_2d_image_switchboard(image_size)]))
        layers.append(get_sfa_layer(prev_layer, layer_config))
        prev_layer = layers[-1]
    flow = mdp.Flow(layers)
    ## create HTML view
    xhtml_file = StringIO.StringIO()
    hinet_translator = mdp.hinet.HiNetXHTMLTranslator()
    hinet_translator.write_flow_to_file(flow=flow, xhtml_file=xhtml_file)
    ## create coverage layer representation
    coverage_svgs = []
    coverage_ids = []
    # Pixel grid first, with a fixed element size.
    coverage_svgs.append(
        switchboard_svg.image_svg_representation(image_size=image_size,
                                                 id_prefix=SVG_ID_PREFIX + "_0",
                                                 element_size=SVG_BASE_SIZE,
                                                 element_gap=SVG_GAP_SIZE,
                                                 element_class=SVG_CLASS_NAME))
    coverage_ids.append([])  # the pixels do not support coverage
    # now the layers
    for i_layer, layer in enumerate(layers):
        svg_id_prefix = SVG_ID_PREFIX + "_%d" % (i_layer + 1)
        # layer[0] is the layer's switchboard; elements grow with depth.
        coverage_svgs.append(layer[0].svg_representation(
            id_prefix=svg_id_prefix,
            element_size=SVG_BASE_SIZE * (i_layer + 2),
            element_gap=SVG_GAP_SIZE,
            element_class=SVG_CLASS_NAME))
        coverage_ids.append([
            svg_id_prefix + "_%d" % i
            for i in range(layer[0].output_channels)
        ])
    return {
        "html_view": xhtml_file.getvalue(),
        "hinet_coverage_svgs": coverage_svgs,
        "hinet_coverage_ids": coverage_ids,
        "hinet_config_str": json.dumps(hinet_config, sort_keys=True, indent=4)
    }
def hierachynettest(bod, recf, ovl):
    """Construct a three-stage hierarchical SFA network.

    bod  -- edge length of the square input channel grid
    recf -- edge length of each bottom-layer receptive field
    ovl  -- field spacing (stride) of the bottom switchboard

    Returns the untrained ``mdp.Flow``:
    switchboard -> cloned (SFA, quadratic expansion, SFA) units ->
    second switchboard -> cloned middle units -> single top unit.
    """
    sfa_dim = 48
    sfa_lower_out = 32

    # --- bottom layer ---
    board_bottom = mdp.hinet.Rectangular2dSwitchboard(
        in_channels_xy=(bod, bod),
        field_channels_xy=(recf, recf),
        field_spacing_xy=(ovl, ovl))
    bottom_sfa = mdp.nodes.SFANode(input_dim=board_bottom.out_channel_dim,
                                   output_dim=sfa_dim)
    bottom_exp = mdp.nodes.QuadraticExpansionNode(input_dim=sfa_dim)
    bottom_sfa2 = mdp.nodes.SFANode(input_dim=bottom_exp.output_dim,
                                    output_dim=sfa_lower_out)
    bottom_unit = mdp.hinet.FlowNode(
        mdp.Flow([bottom_sfa, bottom_exp, bottom_sfa2]))
    bottom_layer = mdp.hinet.CloneLayer(bottom_unit,
                                        n_nodes=board_bottom.output_channels)
    flow = mdp.Flow([board_bottom, bottom_layer])

    # --- middle layer ---
    sfa_upper_bo = 6
    sfa_upper_recf = 4
    sfa_upper_out = 32
    sfa_top_out = 10
    ovl2 = 2
    board_mid = mdp.hinet.Rectangular2dSwitchboard(
        in_channels_xy=(sfa_upper_bo, sfa_upper_bo),
        field_channels_xy=(sfa_upper_recf, sfa_upper_recf),
        field_spacing_xy=(ovl2, ovl2),
        in_channel_dim=sfa_lower_out)
    mid_sfa = mdp.nodes.SFANode(input_dim=board_mid.out_channel_dim,
                                output_dim=sfa_dim)
    mid_exp = mdp.nodes.QuadraticExpansionNode(input_dim=sfa_dim)
    mid_sfa2 = mdp.nodes.SFANode(input_dim=mid_exp.output_dim,
                                 output_dim=sfa_upper_out)
    mid_unit = mdp.hinet.FlowNode(mdp.Flow([mid_sfa, mid_exp, mid_sfa2]))
    mid_layer = mdp.hinet.CloneLayer(mid_unit,
                                     n_nodes=board_mid.output_channels)

    # --- top unit ---
    top_sfa = mdp.nodes.SFANode(input_dim=mid_layer.output_dim,
                                output_dim=sfa_dim)
    top_exp = mdp.nodes.QuadraticExpansionNode(input_dim=sfa_dim)
    top_sfa2 = mdp.nodes.SFANode(input_dim=top_exp.output_dim,
                                 output_dim=sfa_top_out)
    top_unit = mdp.hinet.FlowNode(mdp.Flow([top_sfa, top_exp, top_sfa2]))

    return mdp.Flow(
        [board_bottom, bottom_layer, board_mid, mid_layer, top_unit])
def reservoir_analysis():
    """Drive a single 100-unit reservoir with an AM sine mixture and plot it.

    Builds a one-node flow, trains it on the generated mixture, executes the
    same mixture, prints the output shape, and plots input vs. output.
    """
    size = 1000
    mix = am_mix_of_sine(size, 0.00001)
    # normalize mix
    # mix = mix / mix.max()
    # reservoir prototype (kept for reference; currently unused)
    #prot = filteresn.IIRESN()
    #prot.setInitParam( au.CONNECTIVITY, 0.2 )
    #prot.setInitParam( au.ALPHA, 0.8 )
    #prot.setSize(100)
    # # prot.setNoise( 1e-3 )
    # # prot.setReservoirAct( au.ACT_LINEAR )
    #prot.setSimAlgorithm( au.SIM_FILTER )
    #prot.setLogBPCutoffs(f_start=0.01, f_stop=0.4, bw=0.2, fs=1.)
    # prot.setIIRCoeff(prot.B,prot.A,prot.serial)
    # define a Flow with a reservoir and a SFA output layer
    flow = mdp.Flow([
        ReservoirNode(1, 100, dtype='float64'),
    ])
    #prot.setIIRCoeff(prot.B,prot.A,prot.serial)
    flow.train(mix)
    slow = flow(mix)
    print slow.shape
    plot_signals(mix, slow)
def reservoir_multi():
    """Feed one input into five parallel 10-unit reservoirs, then SFA.

    The switchboard duplicates the single input channel five times
    (connections=[0]*5), a Layer runs one reservoir per copy (5*10 = 50
    outputs), and an SFANode extracts 3 slow features. Prints the flow and
    layer, then plots input vs. slow features.
    """
    size = 1000
    mix = am_mix_of_sine(size, 0.0001)
    # reservoir prototype (kept for reference; currently unused)
    # prot = au.DoubleESN()
    # prot.setInitParam( au.CONNECTIVITY, 0.3 )
    # prot.setInitParam( au.ALPHA, 0.8 )
    # prot.setSize(10)
    # Route the single input channel to all five reservoirs.
    switchboard = mdp.hinet.Switchboard(input_dim=1, connections=[0, 0, 0, 0, 0])
    layer = mdp.hinet.Layer([ReservoirNode(1, 10, dtype='float64'),
                             ReservoirNode(1, 10, dtype='float64'),
                             ReservoirNode(1, 10, dtype='float64'),
                             ReservoirNode(1, 10, dtype='float64'),
                             ReservoirNode(1, 10, dtype='float64')])
    flow = mdp.Flow(
        [switchboard, layer, mdp.nodes.SFANode(input_dim=50, output_dim=3)])
    print flow
    print layer
    flow.train(mix)
    slow = flow(mix)
    # generate HTML grafic of the architecture
    # plot_architecture(flow)
    plot_signals(mix, slow)
def reservoir_multiplesines():
    """Run an AM sine mixture through a reservoir followed by SFA and plot.

    A single 100-unit reservoir feeds an SFANode that extracts 3 slow
    features; alternative pipelines are kept commented out for reference.
    """
    size = 1000
    mix = am_mix_of_sine(size, 0.00001)
    # normalize mix
    # mix = mix / mix.max()
    # reservoir prototype (kept for reference; currently unused)
    #prot = au.DoubleESN()
    #prot.setInitParam( au.CONNECTIVITY, 0.2 )
    #prot.setInitParam( au.ALPHA, 0.8 )
    #prot.setSize(100)
    # prot.setNoise( 1e-3 )
    # prot.setReservoirAct( au.ACT_LINEAR )
    # define a Flow with a reservoir and a SFA output layer
    # flow = mdp.Flow([ReservoirNode(1, 50, 'float64', params), \
    #                  mdp.nodes.TimeFramesNode(5), \
    #                  mdp.nodes.ISFANode(output_dim=3)])
    #                  mdp.nodes.NIPALSNode(output_dim=3)])
    flow = mdp.Flow([
        ReservoirNode(1, 100, dtype='float64'),
        # mdp.nodes.TimeFramesNode(10), \
        mdp.nodes.SFANode(output_dim=3)
    ])
    # flow = mdp.Flow([ReservoirNode(1, 50, 'float64', params), \
    #                  mdp.nodes.PCANode(output_dim=3, svd=True) ])
    # flow = mdp.Flow([ReservoirNode(1, 50, dtype='float64', params),])
    flow.train(mix)
    slow = flow(mix)
    plot_signals(mix, slow)
def estimate_reservoir_distribution(n_samples, n_nodes, connectivity,
                                    input_connectivity_range, window_size):
    """Sample random RBN reservoirs and record their task accuracy.

    For each input connectivity in ``input_connectivity_range``, draws
    ``n_samples`` random reservoirs, trains a ridge-regression readout on a
    temporal-parity dataset, and records ``[accuracy, reservoir]`` pairs.

    n_samples                -- reservoirs sampled per connectivity value
    n_nodes                  -- reservoir size (output_dim of the RBNNode)
    connectivity             -- internal RBN connectivity
    input_connectivity_range -- iterable of input-connectivity values to sweep
    window_size              -- temporal-parity window passed to the dataset

    Returns the list of ``[accuracy, reservoir]`` results. Failures during a
    sample are deliberately logged and skipped (best-effort sweep).
    """
    results = []
    # 20 datasets: all but the last for training, the last for testing.
    datasets = create_datasets(
        20,
        task_size=200,
        window_size=window_size,
        dataset_type='temporal_parity')
    training_dataset, test_dataset = datasets[:-1], datasets[-1]
    for input_connectivity in input_connectivity_range:
        logging.info('Sampling N={} with L={}.'.format(n_samples,
                                                       input_connectivity))
        for sample in range(n_samples):
            try:
                reservoir = RBNNode(connectivity=connectivity,
                                    output_dim=n_nodes,
                                    input_connectivity=input_connectivity)
                readout = Oger.nodes.RidgeRegressionNode(
                    input_dim=reservoir.output_dim, output_dim=1)
                # None: the (untrainable) reservoir gets no training data.
                flow = mdp.Flow([reservoir, readout], verbose=1)
                flow.train([None, training_dataset])
                accuracy = calculate_accuracy(flow, test_dataset)
                #cc = measure_computational_capability(reservoir, 100, window_size)
                #result = [input_connectivity, accuracy, cc]
                results.append([accuracy, reservoir])
                #results.append(result)
                logging.info(accuracy)
            except Exception as e:
                # Best-effort sweep: one bad sample must not abort the run.
                logging.error(e)
                logging.error('Exception occured, Continuing anyways')
    logging.info(results)
    return results
def initialize_esn(self, verbose=False):
    """Create the reservoir, readout, and flow from the instance's ESN config.

    Builds sparse recurrent, input, and bias weight matrices from
    ``self``'s hyper-parameters and wires a leaky reservoir into a ridge
    readout, storing them on ``self.reservoir``, ``self.read_out`` and
    ``self.flow``.

    verbose -- NOTE(review): this parameter is ignored; the flow uses
               ``self.verbose`` instead — confirm which is intended.
    """
    # generate sparse reservoir weights and input weights,
    # The results are good with sparse weights
    w_r = generate_sparse_w(output_size=self.reservoir_size,
                            specrad=self.spectral_radius,
                            seed=self.seed)
    w_in = generate_sparse_w_in(output_size=self.reservoir_size,
                                input_size=self.input_dim,
                                scaling=self.input_scaling,
                                seed=self.seed)
    # Bias built with the same helper: one row scaled by bias_scaling.
    w_bias = generate_sparse_w_in(output_size=1,
                                  input_size=self.reservoir_size,
                                  scaling=self.bias_scaling,
                                  seed=self.seed)
    ## Instansiate reservoir node, read-out and flow
    self.reservoir = LeakyReservoirNode(nonlin_func=mdp.numx.tanh,
                                        input_dim=self.input_dim,
                                        output_dim=self.reservoir_size,
                                        leak_rate=self.leak_rate,
                                        w=w_r,
                                        w_in=w_in,
                                        w_bias=w_bias)
    self.read_out = RidgeRegressionNode(ridge_param=self.ridge,
                                        use_pinv=True,
                                        with_bias=True)
    self.flow = mdp.Flow([self.reservoir, self.read_out],
                         verbose=self.verbose)
def create_reservoir(input_dim, output_dim, input_scaling, leak_rate,
                     spectral_radius, input_sparsity, w_sparsity):
    """
    Create a reservoir
    :param input_dim: Reservoir input dimension.
    :param output_dim: Reservoir size.
    :param input_scaling: Reservoir input scaling.
    :param leak_rate: Reservoir leaky rate.
    :param spectral_radius: Reservoir spectral radius.
    :param input_sparsity: Reservoir input sparsity.
    :param w_sparsity: Reservoir sparsity.
    :return: A MPD flow.
    """
    # Build the single leaky reservoir node from the given hyper-parameters.
    esn_node = Oger.nodes.LeakyReservoirNode(
        input_dim=input_dim,
        output_dim=output_dim,
        input_scaling=input_scaling,
        leak_rate=leak_rate,
        spectral_radius=spectral_radius,
        sparsity=input_sparsity,
        w_sparsity=w_sparsity)

    # Reset state at each call
    esn_node.reset_states = True

    # Wrap it in a verbose one-node flow and hand it back.
    return mdp.Flow([esn_node], verbose=1)
def test_func(x, disp=True):
    """Objective function for hyper-parameter search over (leak, ridge, radius).

    x    -- 3-vector (leak_rate, ridge_param, spectral_radius)
    disp -- print the candidate and its error when True

    Returns the NRMSE on the held-out tail of the data, or the penalty value
    100 when ``x`` falls outside ``bounds``. Relies on module-level globals:
    ``bounds``, ``X_train``, ``data``, ``X``, ``y``, ``N_train``, ``not_blank``.
    """
    # leak, ridge, output_dim = (0.03, 0.01, 200)
    if not bounds(x_new=x):
        # print('    outside range')
        return 100  # penalty for out-of-range candidates
    if disp:
        x_pr = ', '.join(["{0:0.3f}".format(i) for i in x])
        print('x = (', x_pr, ')')
        # sys.stdout.flush()
    leak, ridge, radius = x
    n_neurons = 50
    # PCA down to 40 dims before the reservoir.
    reduction = mdp.nodes.PCANode(input_dim=X_train.shape[1], output_dim=40)
    reservoir = Oger.nodes.LeakyReservoirNode(output_dim=n_neurons,
                                              leak_rate=leak,
                                              spectral_radius=radius)
    readout2 = Oger.nodes.RidgeRegressionNode(ridge_param=ridge)
    flow = mdp.Flow([reduction, reservoir, readout2])
    flow.train(data)
    # Evaluate on the samples after the training cut-off.
    testout = flow(X[N_train:])
    # error = Oger.utils.nrmse(y[N_train:][not_blank], np.sign(testout[not_blank]))
    error = Oger.utils.nrmse(y[N_train:][not_blank], testout[not_blank])
    if disp:
        print('\terror = {0:0.3f}'.format(error))
        # print('', error)
    return error
def _hinet_node(self, input_dim, n_features):
    """Build the trainable unit for one hierarchy layer.

    The unit is SFA1 [-> quadratic expansion -> SFA2] wrapped in a FlowNode.

    input_dim  -- input dimensionality of the unit
    n_features -- scalar or sequence: output dims of SFA1 (and optionally
                  SFA2). -1 means "as many features as the input provides".
                  Requested dims larger than the available input are clamped
                  with a warning.

    Returns an ``mdp.hinet.FlowNode``.
    """
    # Normalize n_features to a mutable list.
    if mdp.numx.isscalar(n_features):
        n_features = [n_features]
    n_features = list(n_features)
    flow = []
    # -1 sentinel: keep every input dimension.
    if n_features[0] == -1:
        n_features[0] = input_dim
    if n_features[0] > input_dim:
        # Cannot extract more features than inputs; clamp and warn.
        _warnings.warn(
            "\nNumber of output features of SFA1 node (%d) is greater than its input_dim (%d). "
            "Setting them equal." % (n_features[0], input_dim))
        sfa1_node = mdp.nodes.SFANode(input_dim=input_dim,
                                      output_dim=input_dim,
                                      dtype=self.dtype)
    else:
        sfa1_node = mdp.nodes.SFANode(input_dim=input_dim,
                                      output_dim=n_features[0],
                                      dtype=self.dtype)
    flow.append(sfa1_node)
    # Optional second stage: quadratic expansion followed by SFA2.
    if len(n_features) > 1:
        exp_node = mdp.nodes.QuadraticExpansionNode(input_dim=sfa1_node.output_dim,
                                                    dtype=self.dtype)
        if n_features[1] == -1:
            n_features[1] = exp_node.output_dim
        if n_features[1] > exp_node.output_dim:
            # Same clamping rule relative to the expanded dimensionality.
            _warnings.warn(
                "\nNumber of output features of SFA2 node (%d) is greater than its input_dim (%d). "
                "Setting them equal." % (n_features[1], exp_node.output_dim))
            sfa2_node = mdp.nodes.SFANode(input_dim=exp_node.output_dim,
                                          output_dim=exp_node.output_dim,
                                          dtype=self.dtype)
        else:
            sfa2_node = mdp.nodes.SFANode(input_dim=exp_node.output_dim,
                                          output_dim=n_features[1],
                                          dtype=self.dtype)
        flow.extend([exp_node, sfa2_node])
    node = mdp.hinet.FlowNode(mdp.Flow(flow), dtype=self.dtype)
    return node
def __init__(self, size, leak_rate, input_scaling, w_sparsity, input_sparsity,
             spectral_radius, converter, n_classes, w=None):
    """Set up a leaky-ESN classifier: reservoir + ridge-regression readout.

    size            -- reservoir size (number of units)
    leak_rate       -- reservoir leak rate
    input_scaling   -- input weight scaling
    w_sparsity      -- sparsity of the recurrent weight matrix
    input_sparsity  -- sparsity of the input weight matrix
    spectral_radius -- spectral radius of the recurrent weights
    converter       -- input converter; its ``get_n_inputs()`` fixes input_dim
    n_classes       -- number of target classes
    w               -- optional pre-built recurrent weight matrix
    """
    # Properties
    self._input_dim = converter.get_n_inputs()
    self._output_dim = size
    self._leak_rate = leak_rate
    self._input_scaling = input_scaling
    self._w_sparsity = w_sparsity
    self._input_sparsity = input_sparsity
    self._spectral_radius = spectral_radius
    self._converter = converter
    self._n_classes = n_classes
    self._examples = dict()
    # Create the reservoir
    self._reservoir = Oger.nodes.LeakyReservoirNode(input_dim=self._input_dim,
                                                    output_dim=self._output_dim,
                                                    input_scaling=input_scaling,
                                                    leak_rate=leak_rate,
                                                    spectral_radius=spectral_radius,
                                                    sparsity=input_sparsity,
                                                    w_sparsity=w_sparsity,
                                                    w=w)
    # Reset state at each call
    self._reservoir.reset_states = True
    # Ridge Regression
    self._readout = Oger.nodes.RidgeRegressionNode()
    # Flow
    self._flow = mdp.Flow([self._reservoir, self._readout], verbose=0)
def initialize_esn(self, verbose=False):
    """Create reservoir, readout, and flow from the instance's ESN config.

    Variant that also forwards ``self._instance`` to the reservoir node.
    Stores the results on ``self.reservoir``, ``self.read_out`` and
    ``self.flow``.

    verbose -- NOTE(review): this parameter is ignored; the flow uses
               ``self.verbose`` instead — confirm which is intended.
    """
    # generate sparse reservoir weights and input weights,
    # The results are good with sparse weights
    w_r = generate_sparse_w(output_size=self.reservoir_size,
                            specrad=self.spectral_radius,
                            seed=self.seed)
    w_in = generate_sparse_w_in(output_size=self.reservoir_size,
                                input_size=self.input_dim,
                                scaling=self.input_scaling,
                                seed=self.seed)
    w_bias = generate_sparse_w_in(output_size=1,
                                  input_size=self.reservoir_size,
                                  scaling=self.bias_scaling,
                                  seed=self.seed)
    # Dense alternatives, kept for reference:
    #w_r=generate_internal_weights(reservoir_size=self.reservoir_size,spectral_radius=self.spectral_radius,seed=self.seed,proba=0.1)
    #w_in=generate_input_weights(reservoir_size=self.reservoir_size,input_dim=self.input_dim,input_scaling=self.input_scaling,seed=self.seed,proba=0.3)
    #w_bias=generate_input_weights(reservoir_size=1,input_dim=self.reservoir_size,input_scaling=self.bias_scaling,seed=self.seed,proba=0.3)
    ## Instansiate reservoir node, read-out and flow
    self.reservoir = LeakyReservoirNode(nonlin_func=mdp.numx.tanh,
                                        input_dim=self.input_dim,
                                        output_dim=self.reservoir_size,
                                        leak_rate=self.leak_rate,
                                        w=w_r,
                                        w_in=w_in,
                                        w_bias=w_bias,
                                        _instance=self._instance)
    self.read_out = RidgeRegressionNode(ridge_param=self.ridge,
                                        use_pinv=True,
                                        with_bias=True)
    #read_out = RidgeRegressionNode(ridge_param=self.ridge,other_error_measure= rmse,cross_validate_function=n_fold_random,n_folds=10,verbose=self.verbose)
    self.flow = mdp.Flow([self.reservoir, self.read_out],
                         verbose=self.verbose)
def test_Flow_deepcopy_lambda():
    """Copying a Flow with a lambda member function should not throw an Exception"""
    # Attach a lambda attribute to a plain node — lambdas are not
    # deep-copyable by default, which is exactly what this exercises.
    node_with_lambda = mdp.Node()
    node_with_lambda.lambda_function = lambda: 1
    # The copy must succeed without raising.
    mdp.Flow([node_with_lambda]).copy()
def create_reservoir(input_dim, output_dim, input_scaling, leak_rate, t_in, t_out):
    """
    Create and train a leaky-ESN flow with a ridge-regression readout.

    :param input_dim: reservoir input dimension
    :param output_dim: reservoir size
    :param input_scaling: reservoir input scaling
    :param leak_rate: reservoir leak rate
    :param t_in: training input sequence(s)
    :param t_out: training target sequence(s)
    :return: the trained mdp.Flow
    """
    r_reservoir = Oger.nodes.LeakyReservoirNode(input_dim=input_dim,
                                                output_dim=output_dim,
                                                input_scaling=input_scaling,
                                                leak_rate=leak_rate)
    r_readout = Oger.nodes.RidgeRegressionNode()
    # Create the flow
    r_flow = mdp.Flow([r_reservoir, r_readout], verbose=1)
    # Reservoir input data: raw input for the reservoir,
    # (input, target) pairs for the supervised readout.
    r_data = [t_in, zip(t_in, t_out)]
    # Train
    r_flow.train(r_data)
    return r_flow
def setup_parallel_training(self, data_iterables,
                            train_callable_class=FlowTrainCallable):
    """Prepare the flow for handing out tasks to do the training.

    After calling setup_parallel_training one has to pick up the
    tasks with get_task, run them and finally return the results via
    use_results. tasks are available as long as task_available returns
    True. Training may require multiple phases, which are each closed by
    calling use_results.

    data_iterables -- A list of iterables, one for each node in the flow.
        The iterators returned by the iterables must
        return data arrays that are then used for the node training.
        See Flow.train for more details.
        If a custom train_callable_class is used to preprocess the data
        then other data types can be used as well.
    train_callable_class -- Class used to create training callables for the
        scheduler. By specifying your own class you can implement data
        transformations before the data is actually fed into the flow
        (e.g. from 8 bit image to 64 bit double precision).

    Raises ParallelFlowException if a parallel training session is
    already in progress.
    """
    if self.is_parallel_training:
        err = "Parallel training is already underway."
        raise ParallelFlowException(err)
    self._train_callable_class = train_callable_class
    self._train_data_iterables = self._train_check_iterables(data_iterables)
    # Start with the first node; _next_train_phase advances from here.
    self._i_train_node = 0
    self._flownode = FlowNode(mdp.Flow(self.flow))
    self._next_train_phase()
def create_reservoir(n_symbols, word_sparsity, size, input_scaling, leak_rate, spectral_radius, w_sparsity): """ Create a reservoir. :param input_dim: :param output_dim: :param input_scaling: :param leak_rate: :param t_in: :param t_out: :return: """ # Create the reservoir reservoir = RCNLPWordReservoirNode(input_dim=n_symbols, output_dim=size, input_scaling=input_scaling, leak_rate=leak_rate, spectral_radius=spectral_radius, word_sparsity=word_sparsity, w_sparsity=w_sparsity) # Create the flow r_flow = mdp.Flow([reservoir], verbose=1) return r_flow
def visualize_correctness(n=25, working_dir=None):
    """Plot actual vs. expected reservoir output on the first ``n`` samples.

    Loads the dataset, reservoir, and readout from pickled files in
    ``working_dir`` (default: ``get_working_dir()``), runs the flow,
    thresholds the output at 0.5, and saves the comparison plot to
    'temp-2.pdf'.
    """
    if not working_dir:
        working_dir = get_working_dir()
    (reservoir_input, expected_output), _ =\
        glob_load(working_dir + '*-dataset')[0]
    rbn_reservoir, _ = glob_load(working_dir + '*-reservoir')[0]
    readout, _ = glob_load(working_dir + '*-readout')[0]
    rbn_reservoir.reset_state()
    flow = mdp.Flow([rbn_reservoir, readout], verbose=1)
    actual_output = flow.execute(reservoir_input)
    # Binarize the analog readout in place.
    for output in actual_output:
        output[0] = 1 if output[0] > 0.5 else 0
    errors = sum(actual_output != expected_output)
    # NOTE(review): accuracy is computed but never used/printed here.
    accuracy = 1 - float(errors) / len(actual_output)
    plt.title('Reservoir performance')
    plt.plot(actual_output[:n], 'y', linewidth=1.5)
    plt.plot(expected_output[:n], 'b', linewidth=1.5)
    plt.legend(['Actual output', 'Expected output'])
    plt.savefig('temp-2.pdf', bbox_inches='tight')
def testStateCompression(self, level=1):
    """ Test a reservoir with state compression in the readout.
    (Not really a test, should just show how to build up these networks ...)

    Builds reservoir + identity passthrough -> state compression -> linear
    readout, trains it on the last training target row, and asserts the
    output shape matches.
    """
    compr_points = 5
    # construct mdp ESN
    reservoir = ReservoirNode(self.inputs, self.size, dtype='float64',
                              prototype=self.net)
    # Compression sees reservoir states concatenated with the raw inputs.
    compr = StateCompressionNode(self.size + self.inputs,
                                 support_points=compr_points)
    readout = LinearReadoutNode(compr_points * (self.size + self.inputs),
                                self.outputs, ignore=0, use_pi=1)
    # build hierarchical mdp network: reservoir and identity share the input.
    res = mdp.hinet.SameInputLayer([reservoir,
                                    IdentityNode(self.inputs, self.inputs)])
    flow = mdp.Flow([res, compr, readout])
    # plot_mdp_network(flow,"mdp_network_1.html")
    mdp_net = mdp.hinet.FlowNode(flow)
    # train mdp network on the final target row only (1 x outputs).
    trainout = self.train_out[-1, :].reshape(1, -1)
    mdp_net.train(self.train_in, trainout)
    mdp_net.stop_training()
    # mdp_net.train([None, None, [(self.train_in, trainout)]])
    # run the model with test data and check the shape
    mdp_out = mdp_net(self.test_in)
    assert mdp_out.shape == trainout.shape, 'incorrect output shape'
def create_network(network, subimage_width, subimage_height, benchmark,
                   in_channel_dim=1, num_features_appended_to_input=0):
    """ This function creates a hierarchical network according to the description stored in the
    object 'network'.

    The object 'network' is of type system_parameters.ParamsNetwork() and contains several layers
    (either hierarchical or non-hierarchical).

    network          -- network description (system_parameters.ParamsNetwork())
    subimage_width   -- width of the input subimage
    subimage_height  -- height of the input subimage
    benchmark        -- list to append ("Hierarchy construction", seconds) to,
                        or a falsy value to skip benchmarking
    in_channel_dim   -- channels per input pixel (1 for L, 3 for RGB)
    num_features_appended_to_input -- extra features appended to the raw input

    Returns (flow, layers, benchmark).
    """
    print "Using hierarchical network: ", network.name
    if len(network.layers) > 0:
        # Drop placeholder (None) layers from the description.
        layers = []
        for layer in network.layers:
            if layer is not None:
                layers.append(layer)
    else:
        er = "Obsolete network description? network.layers should have at least one layer!"
        raise Exception(er)
    layers[0].in_channel_dim = in_channel_dim  # 1 for L, 3 for RGB
    # Each layer's input channel dim is the previous layer's SFA output dim.
    for i in range(len(layers)):
        if i > 0:
            layers[i].in_channel_dim = layers[i - 1].sfa_out_dim
    print "Layers: ", layers
    t1 = time.time()
    print "layers =", layers
    node_list = []
    previous_layer = None
    for i, layer in enumerate(layers):
        if i == 0:
            # The first layer needs the image geometry.
            layer = create_layer(None, layer, i, subimage_height,
                                 subimage_width,
                                 num_features_appended_to_input)
        else:
            layer = create_layer(previous_layer, layer, i)
        previous_layer = layer
        print "L=", layer
        print "L.node_list=", layer.node_list
        node_list.extend(layer.node_list)
    node_list = remove_Nones(node_list)
    print "Flow.node_list=", node_list
    flow = mdp.Flow(node_list, verbose=True)
    t2 = time.time()
    print "Finished hierarchy construction, with total time %0.3f ms" % (
        (t2 - t1) * 1000.0)
    if benchmark:
        benchmark.append(("Hierarchy construction", t2 - t1))
    return flow, layers, benchmark
def __init__(self, input_dim=None, output_dim=None, dtype='float64',
             nr_experts=10, support_points=5, prototype=None):
    """ Init the linear readout.

    nr_experts -- how many individual reservoirs are in the array
    support_points -- States are segmented into this number of support
                      points. Between discrete states the values are
                      interpolated linearly.
    prototype -- a prototype reservoir which will be cloned with all its
                 parameters

    Builds ``nr_experts`` independent (reservoir + identity) -> compression
    -> readout experts, runs them in parallel on the same input, and
    averages their votes in ``self.network``.
    """
    super(ReservoirArrayStateComprNode, self).__init__(input_dim, output_dim,
                                                       dtype)
    self.nr_experts = nr_experts
    self.support_points = support_points
    res_size = prototype.getSize()
    expert_array = []
    for n in range(self.nr_experts):
        # make a deep copy and new initialization of all reservoirs
        reservoir = ReservoirNode(input_dim, res_size, dtype, prototype)
        # Compression sees reservoir states plus the raw input.
        compr = StateCompressionNode(res_size + input_dim,
                                     support_points=self.support_points)
        readout = LinearReadoutNode(self.support_points * (res_size + input_dim),
                                    output_dim, ignore=0, use_pi=1)
        res = mdp.hinet.SameInputLayer(
            [reservoir, IdentityNode(input_dim, input_dim)])
        flow = mdp.Flow([res, compr, readout])
        expert_array.append(mdp.hinet.FlowNode(flow))
    # build multiple networks: all experts read the same input.
    experts = mdp.hinet.SameInputLayer(expert_array)
    # Average the experts' votes down to a single output.
    output_layer = VoteAverageNode(output_dim * self.nr_experts, output_dim)
    self.network = mdp.hinet.FlowNode(mdp.Flow([experts, output_layer]))
def calculate_fitness(self, genotype):
    """Score a genotype: decode it to a reservoir and measure task accuracy.

    The genotype is mapped to an RBN reservoir phenotype, its state is
    cleared, and the reservoir is run with the instance's readout on
    ``self.dataset``. Returns the resulting accuracy.
    """
    reservoir = genotype_to_phenotype(
        genotype, self.n_nodes, self.connectivity)
    reservoir.reset_state()
    evaluation_flow = mdp.Flow([reservoir, self.readout], verbose=1)
    return calculate_accuracy(evaluation_flow, self.dataset)
def reset(self):
    """Discard the trained readout and flow, then rebuild both.

    The reservoir itself is kept; only the ridge-regression readout is
    replaced with a fresh untrained one and re-wired into a new flow.
    """
    del self._readout, self._flow
    # Fresh, untrained ridge regression readout.
    fresh_readout = Oger.nodes.RidgeRegressionNode()
    self._readout = fresh_readout
    # Re-wire reservoir -> readout.
    self._flow = mdp.Flow([self._reservoir, fresh_readout], verbose=0)
def hierarchical_multiplesines():
    """Stack three reservoir+SFA stages and plot each stage's slow features.

    Each stage is trained on the previous stage's output: stage 1 and 2 end
    in a resampling node; stage 3 ends in SFA. Prints the three output
    shapes and plots each against the input mixture.
    """
    size = 1000
    mix = am_mix_of_sine(size, 0.00001)
    # mix = am_mix_of_sine(size,0.01)
    # normalize mix
    # mix = mix / mix.max()
    # reservoir prototype (kept for reference; currently unused)
    #prot = au.DoubleESN()
    #prot.setInitParam( au.CONNECTIVITY, 0.2 )
    #prot.setInitParam( au.ALPHA, 0.8 )
    #prot.setSize(100)
    # prot.setNoise( 1e-3 )
    # prot.setReservoirAct( au.ACT_LINEAR )
    # Hierarchical Network with Reservoirs and SFA nodes
    layer1 = mdp.Flow([ReservoirNode(1, 100, dtype='float64'),
                       mdp.nodes.SFANode(output_dim=3),
                       # mdp.nodes.PCANode(output_dim=5,svd=True), \
                       ResampleNode(3, 0.4, window="hamming")])
    #prot.setSize(50)
    layer2 = mdp.Flow([ReservoirNode(3, 50, dtype='float64'),
                       mdp.nodes.SFANode(output_dim=3),
                       # mdp.nodes.PCANode(output_dim=5,svd=True), \
                       ResampleNode(3, 0.4, window="hamming")])
    #prot.setSize(50)
    layer3 = mdp.Flow([ReservoirNode(3, 50, dtype='float64'),
                       # mdp.nodes.PCANode(output_dim=3,svd=True) ])
                       mdp.nodes.SFANode(output_dim=3)])
    # Greedy layer-wise training: each stage learns from the previous output.
    layer1.train(mix)
    slow1 = layer1(mix)
    layer2.train(slow1)
    slow2 = layer2(slow1)
    layer3.train(slow2)
    slow3 = layer3(slow2)
    print slow1.shape, slow2.shape, slow3.shape
    plot_all_signals(mix, slow1)
    plot_all_signals(mix, slow2)
    plot_all_signals(mix, slow3)
def test_two_nodes2(self):
    """Test a TestBiFlowNode with two normal nodes using a normal Flow.

    Wraps SFA -> SFA2 in a BiFlowNode, trains it inside a plain mdp.Flow on
    six random batches, and executes it once to verify nothing raises.
    """
    sfa_node = mdp.nodes.SFANode(input_dim=10, output_dim=8)
    sfa2_node = mdp.nodes.SFA2Node(input_dim=8, output_dim=6)
    flownode = BiFlowNode(BiFlow([sfa_node, sfa2_node]))
    flow = mdp.Flow([flownode])
    # One iterable (single flow node) yielding six (30, 10) batches.
    data_iterables = [[n.random.random((30, 10)) for _ in range(6)]]
    flow.train(data_iterables)
    x = n.random.random([100, 10])
    flow.execute(x)
def get_sfa_layer(prev_layer, layer_config):
    """Build one hierarchy layer: switchboard + cloned SFA/SFA2 units.

    prev_layer   -- previous layer FlowNode; its first node ([0]) is the
                    previous switchboard used to derive this layer's routing
    layer_config -- dict with 'layer_type' (key into SWITCHBOARD_TYPES_DICT),
                    the switchboard's free parameters, 'sfa_dim' and 'sfa2_dim'

    Returns an ``mdp.hinet.FlowNode`` of [switchboard, cloned SFA layer].
    """
    switchboard_class = SWITCHBOARD_TYPES_DICT[layer_config["layer_type"]]
    # Collect only the free parameters this switchboard type declares.
    switchboard_params = {}
    for key in switchboard_class.free_parameters:
        if key in layer_config:
            switchboard_params[key] = layer_config[key]
    switchboard = switchboard_class.create_switchboard(
        free_params=switchboard_params,
        prev_switchboard=prev_layer[0],
        prev_output_dim=prev_layer.output_dim)
    sfa_input_dim = switchboard.out_channel_dim
    sfa_node = mdp.nodes.SFANode(input_dim=sfa_input_dim,
                                 output_dim=layer_config["sfa_dim"])
    sfa2_node = mdp.nodes.SFA2Node(input_dim=layer_config["sfa_dim"],
                                   output_dim=layer_config["sfa2_dim"])
    flownode = mdp.hinet.FlowNode(mdp.Flow([sfa_node, sfa2_node]))
    # One shared (cloned) SFA unit per switchboard output channel.
    sfa_layer = mdp.hinet.CloneLayer(flownode,
                                     n_nodes=switchboard.output_channels)
    return mdp.hinet.FlowNode(mdp.Flow([switchboard, sfa_layer]))
def pca_multiplesines():
    """Embed an AM sine mixture in time frames, project with PCA, and plot.

    A TimeFramesNode builds 10-sample delay embeddings and an SVD-based
    PCANode keeps the top 3 components; input and projection are plotted.
    """
    n_samples = 1000
    signal = am_mix_of_sine(n_samples, 0.01)

    # perform PCA on a 10-frame time embedding of the signal
    pca_flow = mdp.Flow([
        mdp.nodes.TimeFramesNode(10),
        mdp.nodes.PCANode(output_dim=3, svd=True),
    ])
    pca_flow.train(signal)
    projection = pca_flow(signal)

    plot_signals(signal, projection)