    Returns: The neuron activation based on the summed output.
    """
    return z if z >= 0 else 0.01 * z


if __name__ == '__main__':
    # if executed from automation script
    if len(argv) == 3:
        AUTO = bool(int(argv[2]))
    else:
        AUTO = False

    # set up containers for the per-epoch performance metrics
    MSE, TRP, TEP = [], [], []

    # load data to train and test the network on
    TRAIN, TEST = io.load_data(f'../data/{argv[1]}.csv', par.get_holdout())

    # network-specific parameters
    FEATURES = len(TRAIN[0][:-1])                 # number of attributes of data
    CLASSES = len({c[-1] for c in TRAIN + TEST})  # distinct classifications
    HIDDEN_SIZE = par.get_hidden_size(argv[1])
    DIMENSIONS = (HIDDEN_SIZE * (FEATURES + 1)) + (CLASSES * (HIDDEN_SIZE + 1))
    EPOCHS, AXIS_RANGE = par.get_epochs(), par.get_rand_range()

    # de-specific parameters
    POP_SIZE = par.get_de_population_size()
    CROSS_RATE, DIFF_WEIGHT = par.get_de_params(argv[1])

    # run the de-nn
    differential_evolution(DIMENSIONS, EPOCHS, POP_SIZE, AXIS_RANGE,
                           CROSS_RATE, DIFF_WEIGHT)

    if not AUTO:
        io.plot_data(EPOCHS, MSE, TRP, TEP)

    exit(0)
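

# The differential_evolution called above is defined elsewhere in this
# module; as a reference point, here is a minimal, self-contained sketch
# of the classic DE/rand/1/bin scheme taking the same parameter set
# (assumes pop_size >= 4). The sphere objective (_fitness) is a
# hypothetical stand-in: the real script presumably evaluates the
# network's error on TRAIN instead.
import random


def _fitness(vector):
    # hypothetical placeholder objective: minimize the sum of squares
    return sum(x * x for x in vector)


def differential_evolution_sketch(dimensions, epochs, pop_size, axis_range,
                                  cross_rate, diff_weight):
    # initialize the population uniformly within the axis range
    population = [[random.uniform(axis_range[0], axis_range[1])
                   for _ in range(dimensions)] for _ in range(pop_size)]
    for _ in range(epochs):
        for i, target in enumerate(population):
            # pick three distinct agents other than the target vector
            a, b, c = random.sample(
                [p for j, p in enumerate(population) if j != i], 3)
            # mutate and binomially cross over, forcing one mutated gene
            forced = random.randrange(dimensions)
            trial = [a[d] + diff_weight * (b[d] - c[d])
                     if d == forced or random.random() < cross_rate
                     else target[d]
                     for d in range(dimensions)]
            # greedy selection: the trial replaces the target if no worse
            if _fitness(trial) <= _fitness(target):
                population[i] = trial
    return min(population, key=_fitness)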
    z : summing output.

    Returns: The differential of the neural output.
    """
    return z * (1 - z)


if __name__ == '__main__':
    # if executed from automation script
    if len(argv) == 3:
        AUTO = bool(int(argv[2]))
    else:
        AUTO = False

    # load data to train and test the network on
    TRAIN, TEST = io.load_data(f'../data/{argv[1]}.csv')

    # network-specific parameters
    FEATURES = len(TRAIN[0][:-1])                 # number of attributes of data
    CLASSES = len({c[-1] for c in TRAIN + TEST})  # distinct classifications
    HIDDEN_SIZE = par.get_hidden_size(argv[1])
    DIMENSIONS = (HIDDEN_SIZE * (FEATURES + 1)) + (CLASSES * (HIDDEN_SIZE + 1))

    # initialize the network with weights drawn uniformly from the random range
    RAND_RANGE = par.get_rand_range()
    WEIGHTS = [random.uniform(RAND_RANGE[0], RAND_RANGE[1])
               for _ in range(DIMENSIONS)]
    NETWORK = net.initialize_network(WEIGHTS, FEATURES, HIDDEN_SIZE, CLASSES)

    # bp-specific parameters
    LEARNING_RATE, MOMENTUM_RATE = par.get_bp_params(argv[1])
    EPOCHS = par.get_epochs()

    # set up containers for the per-epoch performance metrics
    MSE, TRP, TEP = [], [], []

    # run the bp-nn
    stochastic_gradient_descent(NETWORK, CLASSES, TRAIN)

    if not AUTO:
        io.plot_data(EPOCHS, MSE, TRP, TEP)

    exit(0)
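

# stochastic_gradient_descent above is defined elsewhere in this module;
# as a reference point, here is a minimal sketch of where the transfer
# derivative z * (1 - z) enters a gradient descent update. It trains a
# single sigmoid neuron on toy data; the real routine presumably applies
# the same delta rule layer by layer across NETWORK, using LEARNING_RATE
# and MOMENTUM_RATE. All names below are local to the sketch.
import math
import random


def _sigmoid(activation):
    # logistic squashing of the summed input
    return 1.0 / (1.0 + math.exp(-activation))


def sgd_sketch(rows, learning_rate=0.5, epochs=1000):
    # rows: (inputs, target) pairs; the final weight acts as the bias
    n_inputs = len(rows[0][0])
    weights = [random.uniform(-0.5, 0.5) for _ in range(n_inputs + 1)]
    for _ in range(epochs):
        for inputs, target in rows:
            # forward pass: weighted sum plus bias, squashed by the sigmoid
            z = _sigmoid(sum(w * x for w, x in zip(weights, inputs))
                         + weights[-1])
            # backward pass: error scaled by the transfer derivative
            delta = (target - z) * z * (1 - z)
            # gradient step on each input weight and on the bias
            weights = ([w + learning_rate * delta * x
                        for w, x in zip(weights, inputs)]
                       + [weights[-1] + learning_rate * delta])
    return weights


# toy usage: the sketch learns logical OR, a linearly separable problem
# print(sgd_sketch([([0, 0], 0), ([0, 1], 1), ([1, 0], 1), ([1, 1], 1)]))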