def main():
    # Load the shuffled CIFAR-10 dataset and build the model plus its
    # compiled learning / test functions.
    data = GenericClassificationDataset("cifar10", "cifar_10_shuffled.pkl")
    N = data.train[0].shape[0] * 1.
    model, learn, test = build_model()
    some_probs = []
    epoch = 0
    experiment = {"results": None}
    lr = 0.001  # * 100 / (i+100)
    costs = []
    errors = []
    valid_costs = []
    valid_errors = []
    for i in range(1000):
        epoch = i
        cost = 0
        error = 0
        # One pass over the training set in minibatches of 128.
        for x, y in data.trainMinibatches(128):
            c, e = learn(x, y, lr)
            cost += c
            error += e
        # Validate and time the validation pass.
        t0 = time.time()
        valid_error, valid_cost = data.validate(test, 50)
        valid_time = time.time() - t0
        print
        print i, cost / N, error / N
        print valid_error, valid_cost, valid_time
        errors.append(error / N)
        costs.append(cost / N)
        valid_errors.append(valid_error)
        valid_costs.append(valid_cost)
        # Export the first-layer filters and the learning curves, then
        # checkpoint the results and the shared weights every epoch.
        tools.export_feature_image(model.layers[0].h.W, "W_img.png", (32, 32, 3))
        tools.export_multi_plot1d([errors, valid_errors], "errors.png", "error")
        tools.export_multi_plot1d([costs, valid_costs], "costs.png", "cost")
        experiment["results"] = [valid_costs, valid_errors, costs, errors]
        experiment["valid_time"] = valid_time
        with open("experiment.pkl", 'wb') as f:
            pickle.dump(experiment, f, -1)
        shared.exportToFile("weights.pkl")
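# A minimal sketch, assuming only the checkpoint that main() above writes out:
# it reloads the per-epoch "experiment.pkl" and re-plots the learning curves.
# The name plot_history and the use of matplotlib are assumptions, not part of
# the original experiment code; the keys ("results", "valid_time") match main().
def plot_history(path="experiment.pkl"):
    import pickle
    import matplotlib
    matplotlib.use("Agg")  # render to file, no display needed
    import matplotlib.pyplot as plt
    with open(path, 'rb') as f:
        experiment = pickle.load(f)
    valid_costs, valid_errors, costs, errors = experiment["results"]
    plt.figure()
    plt.plot(errors, label="train error")
    plt.plot(valid_errors, label="valid error")
    plt.xlabel("epoch")
    plt.legend()
    plt.savefig("errors_replot.png")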
        # Tail of an alternative version of the training loop (the start of
        # the loop, where cost/error/probs are accumulated, is not shown):
        # this variant also prints prediction probabilities, exports the
        # decoder filters, and pipes frames to a video process.
        t0 = time.time()
        valid_error, valid_cost = data.validate(test, 50)
        valid_time = time.time() - t0
        print
        print i, cost / N, error / N
        print valid_error, valid_cost, valid_time
        print probs.mean() / N
        print probs / N
        errors.append(error / N)
        costs.append(cost / N)
        valid_errors.append(valid_error)
        valid_costs.append(valid_cost)
        tools.export_feature_image(model.layers[0].h.W, "W_img.png", (32, 32, 3))
        tools.export_feature_image(model.layers[0].d.W, "Z_img.png", (32, 32, 3))
        tools.export_multi_plot1d([errors, valid_errors], "errors.png", "error")
        tools.export_multi_plot1d([costs, valid_costs], "costs.png", "cost")
        experiment["results"] = [valid_costs, valid_errors, costs, errors]
        experiment["valid_time"] = valid_time
        with open("experiment.pkl", 'wb') as f:
            pickle.dump(experiment, f, -1)
        shared.exportToFile("weights.pkl")
    # Close the video pipe once training is done.
    video.stdin.close()
    video.wait()


def test(expid):
    # to test:
    # OMP_NUM_THREADS=1 THEANO_FLAGS=device=cpu taskset -c 0 python $(expid)/code.py $(expid)
    import os
    os.chdir(expid)
    print "loading data"