isConstantConnectivity=False) res = Reservoir(I, L, bn_directory + 'time_series_data_3.csv', directory + 'experiment1_2018-08-03.csv', N_list[i], varF, F, init) # Train and test output layer output = OutputLayer(res, O, functionsToApproximate, functionInputs, delay, dataStreamLength, nonRecursiveArgs=[[(0, 2)]]) output.train(trainingSize) output.test(testSize) # add results to data print('adding data') data[i, j] = sum([output.successRates[k] for k in range(O)]) / O time = int(time.clock() - start) # Write metadata to file f_utils.printParameters(N_list, K, I, L_list, window, delay, dataStreamLength, trainingSize, testSize, O, 'recursiveParity', seed, time) # write data to file df = pd.DataFrame(data) df.index = N_list print(df.to_csv(header=L_list))
# Number of extra links: L_list holds percentages of the network size N.
L = L_list[j] * N_list[i] // 100

# Initialize reservoir: random Boolean network with N + I nodes.
bn_directory = os.getcwd() + '/BN_realization/'
directory = os.getcwd() + '/'
# NOTE(review): under Python 3, `L / N_list[i]` is true division (float);
# this Python-2-era code (cf. time.clock below) would have truncated to an
# int. Confirm whether `K + L // N_list[i]` was intended before changing.
varF, F, init = bn.getRandomParameters(N_list[i] + I, K + (L / N_list[i]),
                                       isConstantConnectivity=False)
res = Reservoir(I, L, bn_directory + 'time_series_data_3.csv',
                directory + 'experiment1_2018-08-03.csv',
                N_list[i], varF, F, init)

# Train and test the output layer on the three-bit-function task.
output = OutputLayer(res, O, functionsToApproximate, functionInputs, delay,
                     dataStreamLength)
output.train(trainingSize)
output.test(testSize)

# Record the mean success rate over the O approximated functions.
data[i, j] = sum(output.successRates[k] for k in range(O)) / O

# Elapsed seconds since `start`.
# FIX: the original bound this to the name `time`, shadowing the `time`
# module — any later `time.*` call (e.g. on the next loop iteration) would
# fail on an int. Renamed to `elapsed_seconds`.
# NOTE(review): time.clock() was removed in Python 3.8; migrate both this
# call and the `start` timestamp (set outside this fragment) to
# time.perf_counter() together — TODO confirm.
elapsed_seconds = int(time.clock() - start)

# Write run metadata to file.
f_utils.printParameters(N_list, K, I, L_list, window, delay, dataStreamLength,
                        trainingSize, testSize, O, 'allThreeBit', seed,
                        elapsed_seconds)

# Emit the results table (rows indexed by N, columns headed by L_list).
# NOTE(review): this prints the CSV to stdout rather than writing a file,
# despite the original "write data to file" comment — confirm whether
# df.to_csv(path, header=L_list) was intended.
df = pd.DataFrame(data)
df.index = N_list
print(df.to_csv(header=L_list))