def __init__(self, filename):
    """Load annealing points from a CSV file and start in a random order.

    filename -- path to a CSV file, one point per line, parsed by
    Point.from_csv_line.
    """
    Thermometer.__init__(self)
    # BUG FIX: the file handle was opened inline and never closed;
    # a with-block guarantees it is released even on a parse error.
    self.points = []
    with open(filename) as csv_file:
        for line in csv_file:
            self.points.append(Point.from_csv_line(line))
    # choosing initial state at random
    shuffle(self.points)
    # switching interactive plots on
    plt.ion()
def __init__(self, width):
    """Set up three animated line series on one interactive figure.

    width -- x-axis limit (number of samples shown).
    """
    # set plot to animated
    self.x1s = [0]
    self.y1s = [0]
    self.x2s = [0]
    self.y2s = [0]
    self.x3s = [0]
    self.y3s = [0]
    self.plt1, self.plt2, self.plt3 = plt.plot(
        self.x1s, self.y1s, 'rx', self.x2s, self.y2s, 'b.',
        self.x3s, self.y3s, 'gs', alpha=0.05, linewidth=3)
    # Rolling window of the 20 most recent values.
    self.latest = deque([0] * 20)
    # BUG FIX: `self.plt3.set_alpha = 0.8` overwrote the Artist's bound
    # method with a float, so the alpha was never actually changed.
    self.plt3.set_alpha(0.8)
    plt.ylim([0, 100])
    plt.xlim([0, width])
    plt.ion()
def do_plot():
    """Plot train/val loss for the current Caffe solver iteration.

    Reads names from the enclosing scope: `solver`, `display`,
    `test_interval`, `test_iters`, `loss`, `train_loss`, `val_loss`,
    `it_axes`, `it_val_axes`, `ax1`, `training_id` — presumably
    module-level state set up by the training script (TODO: confirm
    against the caller).
    """
    # Record and plot the training loss every `display` iterations.
    if solver.iter % display == 0:
        loss[solver.iter] = solver.net.blobs['loss3/loss3'].data.copy()
        loss_disp = 'loss=' + str(loss[solver.iter])
        print '%3d) %s' % (solver.iter, loss_disp)
        train_loss[solver.iter / display] = loss[solver.iter]
        ax1.plot(it_axes[0:solver.iter / display],
                 train_loss[0:solver.iter / display], 'r')
        # if it > test_interval:
        # ax1.plot(it_val_axes[0:it/test_interval], val_loss[0:it/test_interval], 'g') #Val always on top
        ax1.set_ylim([5, 7])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
    # VALIDATE Validation done this way only uses 1 GPU
    if solver.iter % test_interval == 0 and solver.iter > 0:
        # Average the validation loss over `test_iters` forward passes.
        loss_val = 0
        for i in range(test_iters):
            solver.test_nets[0].forward()
            loss_val += solver.test_nets[0].blobs['loss3/loss3'].data
        loss_val /= test_iters
        print("Val loss: {:.3f}".format(loss_val))
        val_loss[solver.iter / test_interval - 1] = loss_val
        ax1.plot(it_val_axes[0:solver.iter / test_interval],
                 val_loss[0:solver.iter / test_interval], 'g')
        ax1.set_ylim([5, 7])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
        title = '../../../datasets/SocialMedia/models/training/' + training_id + str(
            solver.iter) + '.png'  # Save graph to disk
        savefig(title, bbox_inches='tight')
def plotLive(combine_Type, combine_Name, lat_Name, long_Name, massFlow_Name, filename):
    """Live-plot UTM positions read from a CSV file.

    Filters rows by `combine_Type` (0 means "use every row"), converts the
    lat/long columns to UTM easting/northing via `convertToUTM`, and
    redraws the figure with `drawnow` as points are appended.
    """
    data = pd.read_csv(filename)
    if combine_Type != 0:
        # Keep only the rows belonging to the requested combine.
        comb_df = data[data[combine_Name] == combine_Type]
        lat_df = comb_df[lat_Name]
        lon_df = comb_df[long_Name]
        y = comb_df[massFlow_Name]
    else:
        lat_df = data[lat_Name]
        lon_df = data[long_Name]
        y = data[massFlow_Name]
    # NOTE(review): the mass-flow series assigned to `y` above is discarded
    # when `y` is rebound to a fresh list below — confirm this is intended.
    e, n = convertToUTM(lat_df, lon_df)

    def makeFig():
        # Redraw callback for drawnow: northing on x, easting on y.
        plt.plot(x, y)
        plt.ylabel('Easting')
        plt.xlabel('Northing')

    plt.ion()  # enable interactivity
    plt.grid()
    fig = plt.figure()  # make a figure
    x = list()
    y = list()
    for i in arange(len(n)):
        x.append(n[i])
        y.append(e[i])
        i += 1  # NOTE(review): no effect — the for loop rebinds `i` each pass
        drawnow(makeFig)
def __init__(self, width):
    """Set up three animated line series on one interactive figure.

    width -- x-axis limit (number of samples shown).
    """
    # set plot to animated
    self.x1s = [0]
    self.y1s = [0]
    self.x2s = [0]
    self.y2s = [0]
    self.x3s = [0]
    self.y3s = [0]
    self.plt1, self.plt2, self.plt3 = plt.plot(
        self.x1s, self.y1s, 'rx', self.x2s, self.y2s, 'b.',
        self.x3s, self.y3s, 'gs', alpha=0.05, linewidth=3)
    # Rolling window of the 20 most recent values.
    self.latest = deque([0] * 20)
    # BUG FIX: `self.plt3.set_alpha = 0.8` overwrote the Artist's bound
    # method with a float, so the alpha was never actually changed.
    self.plt3.set_alpha(0.8)
    plt.ylim([0, 100])
    plt.xlim([0, width])
    plt.ion()
""" #%% add CalBlitz folder to python directory path_to_CalBlitz_folder='/Users/agiovann/Documents/SOFTWARE/CalBlitz' import sys sys.path sys.path.append(path_to_CalBlitz_folder) #% add required packages from XMovie import XMovie import time from pylab import plt import numpy as np from utils import matrixMontage #% set basic ipython functionalities try: plt.ion() %load_ext autoreload %autoreload 2 except: print "Probably not a Ipython interactive environment" #%% define movie #filename='k26_v1_176um_target_pursuit_001_005.tif' #frameRate=.033; #filename='20150522_1_1_001.tif' #filename='M_FLUO.tif' #frameRate=.064;
def do_solve(niter, solvers, disp_interval, test_interval, test_iters):
    """Run solvers for niter iterations, returning the loss
    recorded each iteration.

    `solvers` is a list of (name, solver) tuples; the entry named
    'my_solver' is the one whose loss is plotted, and solvers[0] is the
    one used for validation. Saves the trained weights of every solver
    to a temp dir and returns (loss, weights).
    """
    import tempfile
    import numpy as np
    import os
    from pylab import zeros, arange, subplots, plt, savefig
    import time
    # SET PLOTS DATA
    train_loss = zeros(niter / disp_interval)
    val_loss = zeros(niter / test_interval)
    it_axes = (arange(niter) * disp_interval) + disp_interval
    it_val_axes = (arange(niter) * test_interval) + test_interval
    _, ax1 = subplots()
    # ax2 = ax1.twinx()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel('train loss (r), val loss (g)')
    # ax2.set_ylabel('val loss (g)')
    # ax2.set_autoscaley_on(False)
    # ax2.set_ylim([0, 1])
    loss = {name: np.zeros(niter) for name, _ in solvers}
    #RUN TRAINING
    for it in range(niter):
        for name, s in solvers:
            # start = time.time()
            s.step(1)  # run a single SGD step in Caffe
            # end = time.time()
            # print "Time step: " + str((end - start))
            loss[name][it] = s.net.blobs['loss3/loss3'].data.copy()
        #PLOT
        if it % disp_interval == 0 or it + 1 == niter:
            loss_disp = 'loss=' + str(loss['my_solver'][it])
            print '%3d) %s' % (it, loss_disp)
            train_loss[it / disp_interval] = loss['my_solver'][it]
            ax1.plot(it_axes[0:it / disp_interval],
                     train_loss[0:it / disp_interval], 'r')
            ax1.set_ylim([4, 7])
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            # title = '../training/numbers/training-' + str(it) + '.png'  # Save graph to disk
            # savefig(title, bbox_inches='tight')
        #VALIDATE
        if it % test_interval == 0 and it > 0:
            # Average the validation loss over `test_iters` forward passes.
            loss_val = 0
            for i in range(test_iters):
                solvers[0][1].test_nets[0].forward()
                loss_val += solvers[0][1].test_nets[0].blobs[
                    'loss3/loss3'].data
            loss_val /= test_iters
            print("Val loss: {:.3f}".format(loss_val))
            val_loss[it / test_interval - 1] = loss_val
            ax1.plot(it_val_axes[0:it / test_interval],
                     val_loss[0:it / test_interval], 'g')
            ax1.set_ylim([4, 7])
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            title = '../../../datasets/recipes5k/models/training/training-ingredients_Inception_frozen_500_raw' + str(
                it) + '.png'  # Save graph to disk
            savefig(title, bbox_inches='tight')
    #Save the learned weights from both nets at the end of the training
    weight_dir = tempfile.mkdtemp()
    weights = {}
    for name, s in solvers:
        filename = 'weights.%s.caffemodel' % name
        weights[name] = os.path.join(weight_dir, filename)
        s.net.save(weights[name])
    return loss, weights
def do_solve(maxIter, solver, display, test_interval, test_iters): # SET PLOTS DATA train_loss_C = zeros(maxIter/display) train_top1 = zeros(maxIter/display) train_top5 = zeros(maxIter/display) val_loss_C = zeros(maxIter/test_interval) val_top1 = zeros(maxIter/test_interval) val_top5 = zeros(maxIter/test_interval) it_axes = (arange(maxIter) * display) + display it_val_axes = (arange(maxIter) * test_interval) + test_interval _, ax1 = subplots() ax2 = ax1.twinx() ax1.set_xlabel('iteration') ax1.set_ylabel('train loss C (r), val loss C (y)') ax2.set_ylabel('train TOP1 (b), val TOP1 (g), train TOP-5 (c), val TOP-5 (k)') ax2.set_autoscaley_on(False) ax2.set_ylim([0, 1]) lossC = np.zeros(maxIter) acc1 = np.zeros(maxIter) acc5 = np.zeros(maxIter) #RUN TRAINING for it in range(niter): #st = time.time() solver.step(1) # run a single SGD step in Caffepy() #en = time.time() #print "Time step: " + str((en-st)) #PLOT if it % display == 0 or it + 1 == niter: lossC[it] = solver.net.blobs['loss3/loss3'].data.copy() acc1[it] = solver.net.blobs['loss3/top-1'].data.copy() acc5[it] = solver.net.blobs['loss3/top-5'].data.copy() loss_disp = 'loss3C= ' + str(lossC[it]) + ' top-1= ' + str(acc1[it]) print '%3d) %s' % (it, loss_disp) train_loss_C[it / display] = lossC[it] train_top1[it / display] = acc1[it] train_top5[it / display] = acc5[it] ax1.plot(it_axes[0:it / display], train_loss_C[0:it / display], 'r') ax2.plot(it_axes[0:it / display], train_top1[0:it / display], 'b') ax2.plot(it_axes[0:it / display], train_top5[0:it / display], 'c') #ax1.set_ylim([0, 10]) plt.title(training_id) plt.ion() plt.grid(True) plt.show() plt.pause(0.001) #VALIDATE if it % test_interval == 0 and it > 0: loss_val_C = 0 top1_val = 0 top5_val = 0 for i in range(test_iters): solver.test_nets[0].forward() loss_val_C += solver.test_nets[0].blobs['loss3/loss3'].data top1_val += solver.test_nets[0].blobs['loss3/top-1'].data top5_val += solver.test_nets[0].blobs['loss3/top-5'].data loss_val_C /= test_iters top1_val 
/= test_iters top5_val /= test_iters print("Val loss C: {:.3f}".format(loss_val_C)) val_loss_C[it / test_interval - 1] = loss_val_C val_top1[it / test_interval - 1] = top1_val val_top5[it / test_interval - 1] = top5_val ax1.plot(it_val_axes[0:it / test_interval], val_loss_C[0:it/ test_interval], 'y') ax2.plot(it_val_axes[0:it / test_interval], val_top1[0:it / test_interval], 'g') ax2.plot(it_val_axes[0:it / test_interval], val_top5[0:it / test_interval], 'k') #ax1.set_ylim([0, 10]) plt.title(training_id) plt.ion() plt.grid(True) plt.show() plt.pause(0.001) title = '../../../datasets/WebVision/models/training/' + training_id + str(it) + '.png' # Save graph to disk savefig(title, bbox_inches='tight') return
def do_plot():
    """Plot classification + regression losses and top-k accuracy.

    Reads names from the enclosing scope: `solver`, `display`,
    `test_interval`, `test_iters`, the metric arrays (`lossC`, `lossR`,
    `acc1`, `acc5`, `train_*`, `val_*`), plot handles `ax1`/`ax2`, the
    axis vectors and `training_id` — presumably module-level training
    state (TODO: confirm against the caller).
    """
    if solver.iter % display == 0:
        lossC[solver.iter] = solver.net.blobs['loss3/loss3'].data.copy()
        lossR[solver.iter] = solver.net.blobs['loss3/loss3/R'].data.copy()
        acc1[solver.iter] = solver.net.blobs['loss3/top-1'].data.copy()
        # NOTE(review): this reads 'loss2/top-5' while every other metric
        # comes from the loss3 head — confirm that is intentional.
        acc5[solver.iter] = solver.net.blobs['loss2/top-5'].data.copy()
        loss_disp = 'loss3C= ' + str(lossC[solver.iter]) + ' loss3R= ' + str(
            lossR[solver.iter]) + ' top-1= ' + str(acc1[solver.iter])
        print '%3d) %s' % (solver.iter, loss_disp)
        train_loss_C[solver.iter / display] = lossC[solver.iter]
        train_loss_R[solver.iter / display] = lossR[solver.iter]
        train_top1[solver.iter / display] = acc1[solver.iter]
        train_top5[solver.iter / display] = acc5[solver.iter]
        ax1.plot(it_axes[0:solver.iter / display],
                 train_loss_C[0:solver.iter / display], 'r')
        ax1.plot(it_axes[0:solver.iter / display],
                 train_loss_R[0:solver.iter / display], 'm')
        ax2.plot(it_axes[0:solver.iter / display],
                 train_top1[0:solver.iter / display], 'b')
        ax2.plot(it_axes[0:solver.iter / display],
                 train_top5[0:solver.iter / display], 'c')
        ax1.set_ylim([0, 10])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
    # VALIDATE Validation done this way only uses 1 GPU
    if solver.iter % test_interval == 0 and solver.iter > 0:
        # Average validation metrics over `test_iters` forward passes.
        loss_val_R = 0
        loss_val_C = 0
        top1_val = 0
        for i in range(test_iters):
            solver.test_nets[0].forward()
            loss_val_C += solver.test_nets[0].blobs['loss3/loss3'].data
            loss_val_R += solver.test_nets[0].blobs['loss3/loss3/R'].data
            top1_val += solver.test_nets[0].blobs['loss3/top-1'].data
        loss_val_C /= test_iters
        loss_val_R /= test_iters
        top1_val /= test_iters
        print("Val loss C: {:.3f}".format(loss_val_C))
        val_loss_C[solver.iter / test_interval - 1] = loss_val_C
        val_loss_R[solver.iter / test_interval - 1] = loss_val_R
        val_top1[solver.iter / test_interval - 1] = top1_val
        ax1.plot(it_val_axes[0:solver.iter / test_interval],
                 val_loss_C[0:solver.iter / test_interval], 'y')
        ax1.plot(it_val_axes[0:solver.iter / test_interval],
                 val_loss_R[0:solver.iter / test_interval], 'k')
        ax2.plot(it_val_axes[0:solver.iter / test_interval],
                 val_top1[0:solver.iter / test_interval], 'g')
        ax1.set_ylim([0, 10])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
        title = '../../../datasets/WebVision/models/training/' + training_id + str(
            solver.iter) + '.png'  # Save graph to disk
        savefig(title, bbox_inches='tight')
def do_solve(niter, solvers, disp_interval, test_interval, test_iters):
    """Run solvers for niter iterations, returning the loss and accuracy
    recorded each iteration.

    `solvers` is a list of (name, solver) tuples; the entry named
    'my_solver' is plotted, and solvers[0] is used for validation.
    Saves the trained weights of every solver to a temp dir and returns
    (loss, acc, weights).
    """
    import tempfile
    import numpy as np
    import os
    from pylab import zeros, arange, subplots, plt, savefig
    import time
    # SET PLOTS DATA
    train_loss = zeros(niter/disp_interval)
    train_acc = zeros(niter/disp_interval)
    val_acc = zeros(niter/test_interval)
    it_axes = (arange(niter) * disp_interval) + disp_interval
    it_val_axes = (arange(niter) * test_interval) + test_interval
    _, ax1 = subplots()
    ax2 = ax1.twinx()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel('train loss (r)')
    ax2.set_ylabel('train accuracy (b), val accuracy (g)')
    ax2.set_autoscaley_on(False)
    ax2.set_ylim([0, 1])
    # One per-iteration metric array per solver, for each tracked blob.
    blobs = ('loss', 'acc')
    loss, acc = ({name: np.zeros(niter) for name, _ in solvers}
                 for _ in blobs)
    #RUN TRAINING
    for it in range(niter):
        for name, s in solvers:
            # print "FC7 data"
            # print s.net.blobs['fc7'].data
            # print "Classifier weights"
            # print s.net.params['classifier'][0].data
            # print "Classifier data"
            # print s.net.blobs['classifier'].data
            # start = time.time()
            s.step(1)  # run a single SGD step in Caffe
            # end = time.time()
            # print "Time step: " + str((end - start))
            loss[name][it], acc[name][it] = (s.net.blobs[b].data.copy()
                                             for b in blobs)
        #PLOT
        if it % disp_interval == 0 or it + 1 == niter:
            loss_disp = '; '.join('%s: loss=%.3f, acc=%2d%%' %
                                  (n, loss[n][it], np.round(100*acc[n][it]))
                                  for n, _ in solvers)
            print '%3d) %s' % (it, loss_disp)
            train_loss[it/disp_interval] = loss['my_solver'][it]
            train_acc[it/disp_interval] = acc['my_solver'][it]
            ax1.plot(it_axes[0:it/disp_interval],
                     train_loss[0:it/disp_interval], 'r')
            ax2.plot(it_axes[0:it/disp_interval],
                     train_acc[0:it/disp_interval], 'b')
            plt.ion()
            plt.show()
            plt.pause(0.001)
            # title = '../training/numbers/training-' + str(it) + '.png'  # Save graph to disk
            # savefig(title, bbox_inches='tight')
        #VALIDATE
        if it % test_interval == 0 and it > 0:
            # Average the validation accuracy over `test_iters` batches.
            accuracy = 0
            for i in range(test_iters):
                solvers[0][1].test_nets[0].forward()
                accuracy += solvers[0][1].test_nets[0].blobs['acc'].data
            accuracy /= test_iters
            print("Test Accuracy: {:.3f}".format(accuracy))
            val_acc[it/test_interval - 1] = accuracy
            ax2.plot(it_val_axes[0:it/test_interval],
                     val_acc[0:it/test_interval], 'g')
            plt.ion()
            plt.show()
            plt.pause(0.001)
            title = '../../../datasets/SocialMedia/models/training/training-' + str(
                it) + '.png'  # Save graph to disk
            savefig(title, bbox_inches='tight')
    #Save the learned weights from both nets at the end of the training
    weight_dir = tempfile.mkdtemp()
    weights = {}
    for name, s in solvers:
        filename = 'weights.%s.caffemodel' % name
        weights[name] = os.path.join(weight_dir, filename)
        s.net.save(weights[name])
    return loss, acc, weights
def do_solve(niter, solvers, disp_interval, test_interval, test_iters,
             training_id):
    """Run solvers for niter iterations, returning the loss
    recorded each iteration.

    `solvers` is a list of (name, solver) tuples; the entry named
    'my_solver' is plotted, and solvers[0] is used for validation.
    `training_id` labels the plot and the saved figure filenames.
    Returns (loss, weights) after saving weights to a temp dir.
    """
    import tempfile
    import numpy as np
    import os
    from pylab import zeros, arange, subplots, plt, savefig
    import time
    # SET PLOTS DATA
    train_loss = zeros(niter / disp_interval)
    val_loss = zeros(niter / test_interval)
    it_axes = (arange(niter) * disp_interval) + disp_interval
    it_val_axes = (arange(niter) * test_interval) + test_interval
    _, ax1 = subplots()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel('train loss (r), val loss (g)')
    loss = {name: np.zeros(niter) for name, _ in solvers}
    #RUN TRAINING
    for it in range(niter):
        for name, s in solvers:
            s.step(1)  # run a single SGD step in Caffe
            loss[name][it] = s.net.blobs['loss3/loss3'].data.copy()
        #PLOT
        if it % disp_interval == 0 or it + 1 == niter:
            loss_disp = 'loss=' + str(loss['my_solver'][it])
            print '%3d) %s' % (it, loss_disp)
            train_loss[it / disp_interval] = loss['my_solver'][it]
            ax1.plot(it_axes[0:it / disp_interval],
                     train_loss[0:it / disp_interval], 'r')
            ax1.set_ylim([170, 210])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.0001)
        #VALIDATE
        if it % test_interval == 0 and it > 0:
            # Average the validation loss over `test_iters` forward passes.
            loss_val = 0
            for i in range(test_iters):
                solvers[0][1].test_nets[0].forward()
                loss_val += solvers[0][1].test_nets[0].blobs[
                    'loss3/loss3'].data
            loss_val /= test_iters
            print("Val loss: {:.3f}".format(loss_val))
            val_loss[it / test_interval - 1] = loss_val
            ax1.plot(it_val_axes[0:it / test_interval],
                     val_loss[0:it / test_interval], 'g')
            ax1.set_ylim([170, 210])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            title = '../../../hd/datasets/instaBarcelona/models/training/' + training_id + str(
                it) + '.png'
            savefig(title, bbox_inches='tight')
    #Save the learned weights from both nets at the end of the training
    weight_dir = tempfile.mkdtemp()
    weights = {}
    for name, s in solvers:
        filename = 'weights.%s.caffemodel' % name
        weights[name] = os.path.join(weight_dir, filename)
        s.net.save(weights[name])
    return loss, weights
def do_plot():
    """Plot train/val loss and top-k accuracy; checkpoint the best model.

    Reads names from the enclosing scope: `solver`, `display`,
    `test_interval`, `test_iters`, the metric arrays, `ax1`/`ax2`,
    `training_id`, plus the one-element lists `lowest_val_loss` and
    `best_it` — presumably lists so this function can mutate them
    without `global` (TODO: confirm against the caller).
    """
    if solver.iter % display == 0:
        lossC[solver.iter] = solver.net.blobs['loss3/loss3'].data.copy()
        acc1[solver.iter] = solver.net.blobs['loss3/top-1'].data.copy()
        acc5[solver.iter] = solver.net.blobs['loss3/top-5'].data.copy()
        loss_disp = 'loss3C= ' + str(lossC[solver.iter]) + ' top-1= ' + str(
            acc1[solver.iter])
        print '%3d) %s' % (solver.iter, loss_disp)
        train_loss_C[solver.iter / display] = lossC[solver.iter]
        train_top1[solver.iter / display] = acc1[solver.iter]
        train_top5[solver.iter / display] = acc5[solver.iter]
        ax1.plot(it_axes[0:solver.iter / display],
                 train_loss_C[0:solver.iter / display], 'r')
        ax2.plot(it_axes[0:solver.iter / display],
                 train_top1[0:solver.iter / display], 'b')
        ax2.plot(it_axes[0:solver.iter / display],
                 train_top5[0:solver.iter / display], 'c')
        ax1.set_ylim([0, 25])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
    # VALIDATE Validation done this way only uses 1 GPU
    if solver.iter % test_interval == 0 and solver.iter > 0:
        # Average validation metrics over `test_iters` forward passes.
        loss_val_C = 0
        top1_val = 0
        top5_val = 0
        for i in range(test_iters):
            solver.test_nets[0].forward()
            loss_val_C += solver.test_nets[0].blobs['loss3/loss3'].data
            top1_val += solver.test_nets[0].blobs['loss3/top-1'].data
            top5_val += solver.test_nets[0].blobs['loss3/top-5'].data
        loss_val_C /= test_iters
        top1_val /= test_iters
        top5_val /= test_iters
        print("Val loss: " + str(loss_val_C))
        val_loss_C[solver.iter / test_interval - 1] = loss_val_C
        val_top1[solver.iter / test_interval - 1] = top1_val
        val_top5[solver.iter / test_interval - 1] = top5_val
        ax1.plot(it_val_axes[0:solver.iter / test_interval],
                 val_loss_C[0:solver.iter / test_interval], 'y')
        ax2.plot(it_val_axes[0:solver.iter / test_interval],
                 val_top1[0:solver.iter / test_interval], 'g')
        ax2.plot(it_val_axes[0:solver.iter / test_interval],
                 val_top5[0:solver.iter / test_interval], 'k')
        ax1.set_ylim([0, 25])
        ax1.set_xlabel('iteration ' + 'Best it: ' + str(best_it[0]) +
                       ' Best Val Loss: ' + str(int(lowest_val_loss[0])))
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
        title = '../../../ssd2/iMaterialistFashion/models/training/' + training_id + '_' + str(
            solver.iter) + '.png'
        savefig(title, bbox_inches='tight')
        # Checkpoint when a new best validation loss is reached; older
        # checkpoints sharing the filename prefix are removed first.
        if loss_val_C < lowest_val_loss[0]:
            print("Best Val loss!")
            lowest_val_loss[0] = loss_val_C
            best_it[0] = solver.iter
            filename = '../../../ssd2/iMaterialistFashion/models/CNN/' + training_id + 'best_valLoss_' + str(
                int(lowest_val_loss[0])) + '_it_' + str(best_it[0]) + '.caffemodel'
            # `prefix` chars are stripped from the end so the glob matches
            # every earlier best-model file for this training_id.
            prefix = 30
            for cur_filename in glob.glob(filename[:-prefix] + '*'):
                print(cur_filename)
                os.remove(cur_filename)
            solver.net.save(filename)
def do_solve(maxIter, solver, display, test_interval, test_iters): # SET PLOTS DATA train_loss_C = zeros(maxIter / display) train_loss_R = zeros(maxIter / display) train_top1 = zeros(maxIter / display) train_top5 = zeros(maxIter / display) val_loss_C = zeros(maxIter / test_interval) val_loss_R = zeros(maxIter / test_interval) val_top1 = zeros(maxIter / test_interval) it_axes = (arange(maxIter) * display) + display it_val_axes = (arange(maxIter) * test_interval) + test_interval _, ax1 = subplots() ax2 = ax1.twinx() ax1.set_xlabel('iteration') ax1.set_ylabel( 'train loss C (r), val loss C (y), train loss R (m), val loss R (k)') ax2.set_ylabel('train TOP1 (b), val TOP1 (g), train TOP-5 (2) (c)') ax2.set_autoscaley_on(False) ax2.set_ylim([0, 1]) lossC = np.zeros(maxIter) lossR = np.zeros(maxIter) acc1 = np.zeros(maxIter) #RUN TRAINING for it in range(niter): #st = time.time() solver.step(1) # run a single SGD step in Caffepy() #en = time.time() #print "Time step: " + str((en-st)) #PLOT if it % display == 0 or it + 1 == niter: lossC[solver.iter] = solver.net.blobs['loss3/loss3'].data.copy() lossR[solver.iter] = solver.net.blobs['loss3/loss3/R'].data.copy() acc1[solver.iter] = solver.net.blobs['loss3/top-1'].data.copy() #acc5[solver.iter] = solver.net.blobs['loss2/top-5'].data.copy() loss_disp = 'loss3C= ' + str( lossC[solver.iter]) + ' loss3R= ' + str( lossR[solver.iter]) + ' top-1= ' + str(acc1[solver.iter]) print '%3d) %s' % (solver.iter, loss_disp) train_loss_C[solver.iter / display] = lossC[solver.iter] train_loss_R[solver.iter / display] = lossR[solver.iter] train_top1[solver.iter / display] = acc1[solver.iter] #train_top5[solver.iter / display] = acc5[solver.iter] ax1.plot(it_axes[0:solver.iter / display], train_loss_C[0:solver.iter / display], 'r') ax1.plot(it_axes[0:solver.iter / display], train_loss_R[0:solver.iter / display], 'm') ax2.plot(it_axes[0:solver.iter / display], train_top1[0:solver.iter / display], 'b') # ax2.plot(it_axes[0:solver.iter / display], 
train_top5[0:solver.iter / display], 'c') ax1.set_ylim([0, 10]) plt.title(training_id) plt.ion() plt.grid(True) plt.show() plt.pause(0.001) #VALIDATE if it % test_interval == 0 and it > 0: loss_val_R = 0 loss_val_C = 0 top1_val = 0 for i in range(test_iters): solver.test_nets[0].forward() loss_val_C += solver.test_nets[0].blobs['loss3/loss3'].data loss_val_R += solver.test_nets[0].blobs['loss3/loss3/R'].data top1_val += solver.test_nets[0].blobs['loss3/top-1'].data loss_val_C /= test_iters loss_val_R /= test_iters top1_val /= test_iters print("Val loss C: {:.3f}".format(loss_val_C)) val_loss_C[solver.iter / test_interval - 1] = loss_val_C val_loss_R[solver.iter / test_interval - 1] = loss_val_R val_top1[solver.iter / test_interval - 1] = top1_val ax1.plot(it_val_axes[0:solver.iter / test_interval], val_loss_C[0:solver.iter / test_interval], 'y') ax1.plot(it_val_axes[0:solver.iter / test_interval], val_loss_R[0:solver.iter / test_interval], 'k') ax2.plot(it_val_axes[0:solver.iter / test_interval], val_top1[0:solver.iter / test_interval], 'g') ax1.set_ylim([0, 10]) plt.title(training_id) plt.ion() plt.grid(True) plt.show() plt.pause(0.001) return
def do_solve(niter, solvers, disp_interval, test_interval, test_iters,
             training_id, batch_size):
    """Run solvers for niter iterations, plotting loss and correct pairs.

    `solvers` is a list of (name, solver) tuples; the entry named
    'my_solver' is plotted and solvers[0] is used for validation.
    `batch_size` bounds the correct-pairs axis. Returns (loss, weights)
    after saving each solver's weights to a temp dir.
    """
    import tempfile
    import numpy as np
    import os
    from pylab import zeros, arange, subplots, plt, savefig
    import time
    # SET PLOTS DATA
    train_loss = zeros(niter / disp_interval)
    train_correct_pairs = zeros(niter / disp_interval)
    val_loss = zeros(niter / test_interval)
    val_correct_pairs = zeros(niter / test_interval)
    it_axes = (arange(niter) * disp_interval) + disp_interval
    it_val_axes = (arange(niter) * test_interval) + test_interval
    _, ax1 = subplots()
    ax2 = ax1.twinx()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel('train loss (r), val loss (g)')
    ax2.set_ylabel('train correct pairs (b) val correct pairs (m)')
    ax2.set_autoscaley_on(False)
    ax2.set_ylim([0, batch_size])
    loss = {name: np.zeros(niter) for name, _ in solvers}
    correct_pairs = {name: np.zeros(niter) for name, _ in solvers}
    #RUN TRAINING
    for it in range(niter):
        for name, s in solvers:
            # start = time.time()
            s.step(1)  # run a single SGD step in Caffe
            # end = time.time()
            # print "Time step: " + str((end - start))
            # print "Max before ReLU: " + str(np.max(s.net.blobs['inception_5b/pool_proj'].data))
            # print "Max last FC: " + str(np.max(s.net.blobs['loss3/classifierCustom'].data))
            loss[name][it] = s.net.blobs['loss3/loss3'].data.copy()
            correct_pairs[name][it] = s.net.blobs['correct_pairs'].data.copy()
        #PLOT
        if it % disp_interval == 0 or it + 1 == niter:
            loss_disp = 'loss=' + str(
                loss['my_solver'][it]) + ' correct_pairs=' + str(
                    correct_pairs['my_solver'][it])
            print '%3d) %s' % (it, loss_disp)
            train_loss[it / disp_interval] = loss['my_solver'][it]
            train_correct_pairs[it / disp_interval] = correct_pairs['my_solver'][it]
            ax1.plot(it_axes[0:it / disp_interval],
                     train_loss[0:it / disp_interval], 'r')
            ax2.plot(it_axes[0:it / disp_interval],
                     train_correct_pairs[0:it / disp_interval], 'b')
            # if it > test_interval:
            # ax1.plot(it_val_axes[0:it/test_interval], val_loss[0:it/test_interval], 'g') #Val always on top
            ax1.set_ylim([0, 0.05])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            # title = '../training/numbers/training-' + str(it) + '.png'  # Save graph to disk
            # savefig(title, bbox_inches='tight')
        #VALIDATE
        if it % test_interval == 0 and it > 0:
            # Average validation metrics over `test_iters` batches.
            loss_val = 0
            cur_correct_pairs = 0
            for i in range(test_iters):
                solvers[0][1].test_nets[0].forward()
                loss_val += solvers[0][1].test_nets[0].blobs[
                    'loss3/loss3'].data
                cur_correct_pairs += solvers[0][1].test_nets[0].blobs[
                    'correct_pairs'].data
            loss_val /= test_iters
            cur_correct_pairs /= test_iters
            print("Val loss: " + str(loss_val) + " Val correct pairs: " +
                  str(cur_correct_pairs))
            val_loss[it / test_interval - 1] = loss_val
            val_correct_pairs[it / test_interval - 1] = cur_correct_pairs
            ax1.plot(it_val_axes[0:it / test_interval],
                     val_loss[0:it / test_interval], 'g')
            ax2.plot(it_val_axes[0:it / test_interval],
                     val_correct_pairs[0:it / test_interval], 'm')
            ax1.set_ylim([0, 0.05])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            title = '../../../hd/datasets/landmarks_recognition/models/training/' + training_id + str(
                it) + '.png'  # Save graph to disk
            savefig(title, bbox_inches='tight')
    #Save the learned weights from both nets at the end of the training
    weight_dir = tempfile.mkdtemp()
    weights = {}
    for name, s in solvers:
        filename = 'weights.%s.caffemodel' % name
        weights[name] = os.path.join(weight_dir, filename)
        s.net.save(weights[name])
    return loss, weights
def do_solve(niter, solver, disp_interval, test_interval, test_iters,
             training_id):
    """Run solvers for niter iterations, returning the loss
    recorded each iteration.

    `solvers` is a list of (name, solver) tuples.
    (NOTE(review): despite the docstring above, this variant takes a
    single `solver` — confirm the docstring against the caller.)
    Plots train/val loss and checkpoints the best model, deleting older
    checkpoints that share the filename prefix.
    """
    import tempfile
    import numpy as np
    import os
    from pylab import zeros, arange, subplots, plt, savefig
    import time
    import glob
    # SET PLOTS DATA
    train_loss = zeros(niter/disp_interval)
    val_loss = zeros(niter/test_interval)
    it_axes = (arange(niter) * disp_interval) + disp_interval
    it_val_axes = (arange(niter) * test_interval) + test_interval
    _, ax1 = subplots()
    # ax2 = ax1.twinx()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel('train loss (r), val loss (g)')
    # ax2.set_ylabel('val loss (g)')
    # ax2.set_autoscaley_on(False)
    # ax2.set_ylim([0, 1])
    lowest_val_loss = 1000
    best_it = 0
    loss = np.zeros(niter)
    #RUN TRAINING
    for it in range(niter):
        # start = time.time()
        solver.step(1)  # run a single SGD step in Caffe
        # end = time.time()
        # print "Time step: " + str((end - start))
        loss[it] = solver.net.blobs['loss3/loss3'].data.copy()
        #PLOT
        if it % disp_interval == 0 or it + 1 == niter:
            loss_disp = 'loss=' + str(loss[it])
            print '%3d) %s' % (it, loss_disp)
            train_loss[it/disp_interval] = loss[it]
            ax1.plot(it_axes[0:it/disp_interval],
                     train_loss[0:it/disp_interval], 'r')
            # if it > test_interval:
            # ax1.plot(it_val_axes[0:it/test_interval], val_loss[0:it/test_interval], 'g') #Val always on top
            # y-limits track the best validation loss seen so far.
            ax1.set_ylim([int(lowest_val_loss) - 1, int(lowest_val_loss) + 4])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            # title = '../training/numbers/training-' + str(it) + '.png'  # Save graph to disk
            # savefig(title, bbox_inches='tight')
        #VALIDATE
        if it % test_interval == 0 and it > 0:
            # Average the validation loss over `test_iters` forward passes.
            loss_val = 0
            for i in range(test_iters):
                solver.test_nets[0].forward()
                loss_val += solver.test_nets[0].blobs['loss3/loss3'].data
            loss_val /= test_iters
            print("Val loss: {:.3f}".format(loss_val))
            val_loss[it/test_interval - 1] = loss_val
            ax1.plot(it_val_axes[0:it/test_interval],
                     val_loss[0:it/test_interval], 'g')
            ax1.set_ylim([int(lowest_val_loss) - 1, int(lowest_val_loss) + 4])
            ax1.set_xlabel('iteration ' + 'Best it: ' + str(best_it) +
                           ' Best Val Loss: ' + str(int(lowest_val_loss)))
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            title = '../../../hd/datasets/instaMiro/models/training/' + training_id + str(
                it) + '.png'  # Save graph to disk
            savefig(title, bbox_inches='tight')
            # Checkpoint when a new best validation loss is reached; older
            # checkpoints sharing the filename prefix are removed first.
            if loss_val < lowest_val_loss:
                print("Best Val loss!")
                lowest_val_loss = loss_val
                best_it = it
                filename = '../../../hd/datasets/instaMiro/models/CNNRegression/' + training_id + '_best_valLoss_' + str(
                    int(loss_val)) + '_it_' + str(it) + '.caffemodel'
                # `prefix` chars are stripped from the end so the glob
                # matches every earlier best-model file.
                prefix = 30
                for cur_filename in glob.glob(filename[:-prefix] + '*'):
                    print(cur_filename)
                    os.remove(cur_filename)
                solver.net.save(filename)
prefix_len = len('_epoch_' + str(epoch) + '_ValLossNotBest_' + str(round(plot_data['val_loss'][epoch], 3))) train.save_checkpoint(model, filename, prefix_len) if plot: ax1.plot(it_axes[0:epoch + 1], plot_data['train_loss'][0:epoch + 1], 'r') ax2.plot(it_axes[0:epoch + 1], plot_data['train_correct_pairs'][0:epoch + 1], 'b') ax1.plot(it_axes[0:epoch + 1], plot_data['val_loss'][0:epoch + 1], 'y') ax2.plot(it_axes[0:epoch + 1], plot_data['val_correct_pairs'][0:epoch + 1], 'g') plt.title(training_id + str(round(variance, 4)), fontsize=10) plt.ion() plt.grid(True) plt.show() plt.pause(0.001) # Save graph to disk if epoch % 1 == 0 and epoch != 0: title = dataset + '/training/' + training_id + '_epoch_' + str( epoch) + '_var_' + str(round(variance, 4)) + '.png' savefig(title, bbox_inches='tight') variance += variance_step print("Finished Training, saving checkpoint") filename = dataset + '/models/' + training_id + '_epoch_' + str(epoch) prefix_len = len('_epoch_' + str(epoch) + '_ValLoss_' +
def do_solve(niter, solver, disp_interval, test_interval, test_iters,
             training_id, batch_size):
    """Run solvers for niter iterations, returning the loss
    recorded each iteration.

    `solvers` is a list of (name, solver) tuples.
    (NOTE(review): despite the docstring above, this variant takes a
    single `solver` — confirm against the caller.) Tracks a ranking loss
    and correct-pairs count; checkpoints the best model on validation.
    """
    import tempfile
    import numpy as np
    import os
    from pylab import zeros, arange, subplots, plt, savefig
    import glob
    import time
    # SET PLOTS DATA
    # train_loss = zeros(niter/disp_interval)
    train_loss_r = zeros(niter / disp_interval)
    train_correct_pairs = zeros(niter / disp_interval)
    # train_acc = zeros(niter/disp_interval)
    # val_loss = zeros(niter/test_interval)
    val_loss_r = zeros(niter / test_interval)
    val_correct_pairs = zeros(niter / test_interval)
    # val_acc = zeros(niter/test_interval)
    it_axes = (arange(niter) * disp_interval) + disp_interval
    it_val_axes = (arange(niter) * test_interval) + test_interval
    _, ax1 = subplots()
    ax2 = ax1.twinx()
    ax1.set_xlabel('iteration')
    ax1.set_ylabel(
        'train loss (r), val loss (g),')  # train loss_r (c), val loss_r (o)')
    ax2.set_ylabel('train correct pairs (b) val correct pairs (m)'
                   )  # train top1 (y) val top1 (bk)')
    ax2.set_autoscaley_on(False)
    ax2.set_ylim([0, batch_size])
    # loss = {name: np.zeros(niter) for name, _ in solvers}
    loss_r = np.zeros(niter)
    correct_pairs = np.zeros(niter)
    # acc = {name: np.zeros(niter) for name, _ in solvers}
    lowest_val_loss = 1000
    best_it = 0
    #RUN TRAINING
    for it in range(niter):
        # start = time.time()
        solver.step(1)  # run a single SGD step in Caffe
        # end = time.time()
        # print "Time step: " + str((end - start))
        # print "Max before ReLU: " + str(np.max(s.net.blobs['inception_5b/pool_proj'].data))
        # print "Max last FC: " + str(np.max(s.net.blobs['loss3/classifierCustom'].data))
        #loss[name][it] = s.net.blobs['loss3/loss3/classification'].data.copy()
        loss_r[it] = solver.net.blobs['loss3/loss3/ranking'].data.copy()
        correct_pairs[it] = solver.net.blobs['correct_pairs'].data.copy()
        # acc[name][it] = s.net.blobs['loss3/top-1'].data.copy()
        #PLOT
        if it % disp_interval == 0 or it + 1 == niter:
            # loss_disp = 'loss=' + str(loss['my_solver'][it]) + ' correct_pairs=' + str(correct_pairs['my_solver'][it]) + ' loss ranking=' + str(loss_r['my_solver'][it])
            loss_disp = ' correct_pairs=' + str(
                correct_pairs[it]) + ' loss ranking=' + str(loss_r[it])
            print '%3d) %s' % (it, loss_disp)
            # train_loss[it/disp_interval] = loss[it]
            train_loss_r[it / disp_interval] = loss_r[it]
            train_correct_pairs[it / disp_interval] = correct_pairs[it]
            # train_acc[it/disp_interval] = acc[it] *120
            # ax1.plot(it_axes[0:it/disp_interval], train_loss[0:it/disp_interval], 'r')
            ax1.plot(it_axes[0:it / disp_interval],
                     train_loss_r[0:it / disp_interval], 'c')
            ax2.plot(it_axes[0:it / disp_interval],
                     train_correct_pairs[0:it / disp_interval], 'b')
            # ax2.plot(it_axes[0:it/disp_interval], train_acc[0:it/disp_interval], 'gold')
            # if it > test_interval:
            # ax1.plot(it_val_axes[0:it/test_interval], val_loss[0:it/test_interval], 'g') #Val always on top
            ax1.set_ylim([0, 2])
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            # title = '../training/numbers/training-' + str(it) + '.png'  # Save graph to disk
            # savefig(title, bbox_inches='tight')
        #VALIDATE
        if it % test_interval == 0 and it > 0:
            # Average validation metrics over `test_iters` batches.
            # loss_val = 0
            loss_val_r = 0
            cur_correct_pairs = 0
            # cur_acc = 0
            for i in range(test_iters):
                solver.test_nets[0].forward()
                # loss_val += solver.test_nets[0].blobs['loss3/loss3/classification'].data
                loss_val_r += solver.test_nets[0].blobs[
                    'loss3/loss3/ranking'].data
                cur_correct_pairs += solver.test_nets[0].blobs[
                    'correct_pairs'].data
                # cur_acc += solvers[0][1].test_nets[0].blobs['loss3/top-1'].data
            # loss_val /= test_iters
            loss_val_r /= test_iters
            cur_correct_pairs /= test_iters
            # cur_acc /= test_iters
            # cur_acc *= 120
            # print("Val loss: " + str(loss_val) + " Val correct pairs: " + str(cur_correct_pairs) + " Val loss ranking: " + str(loss_val_r) + "Val acc: "+ str(cur_acc))
            print(" Val correct pairs: " + str(cur_correct_pairs) +
                  " Val loss ranking: " + str(loss_val_r))
            # val_loss[it/test_interval - 1] = loss_val
            val_loss_r[it / test_interval - 1] = loss_val_r
            val_correct_pairs[it / test_interval - 1] = cur_correct_pairs
            # val_acc[it/test_interval - 1] = cur_acc
            # ax1.plot(it_val_axes[0:it/test_interval], val_loss[0:it/test_interval], 'g')
            ax1.plot(it_val_axes[0:it / test_interval],
                     val_loss_r[0:it / test_interval], 'orange')
            ax2.plot(it_val_axes[0:it / test_interval],
                     val_correct_pairs[0:it / test_interval], 'm')
            # ax2.plot(it_val_axes[0:it/test_interval], val_acc[0:it/test_interval], 'k')
            ax1.set_ylim([0, 2])
            ax1.set_xlabel('iteration ' + 'Best it: ' + str(best_it) +
                           ' Best Val Loss: ' + str(int(lowest_val_loss)))
            plt.title(training_id)
            plt.ion()
            plt.grid(True)
            plt.show()
            plt.pause(0.001)
            title = '../../../hd/datasets/instaFashion/models/training/' + training_id + str(
                it) + '.png'  # Save graph to disk
            savefig(title, bbox_inches='tight')
            # Checkpoint when a new best validation ranking loss is
            # reached; older checkpoints sharing the prefix are removed.
            if loss_val_r < lowest_val_loss:
                print("Best Val loss!")
                lowest_val_loss = loss_val_r
                best_it = it
                filename = '../../../hd/datasets/instaFashion/models/CNNContrastive/' + training_id + 'best_valLoss_' + str(
                    int(loss_val_r)) + '_it_' + str(it) + '.caffemodel'
                # `prefix` chars are stripped from the end so the glob
                # matches every earlier best-model file.
                prefix = 30
                for cur_filename in glob.glob(filename[:-prefix] + '*'):
                    print(cur_filename)
                    os.remove(cur_filename)
                solver.net.save(filename)
#!/usr/bin/env python
"""Live scatter-plot demo: append a random point and redraw each pass."""
from pylab import arange, plt
from drawnow import drawnow
import numpy as np

plt.ion()  # enable interactivity
fig = plt.figure()  # make a figure


def makeFig():
    # Redraw callback: scatter the points accumulated so far.
    plt.scatter(x, y)  # I think you meant this


x = list()
y = list()
for i in arange(1000):
    temp_y = np.random.random()
    x.append(i)
    y.append(temp_y)  # or any arbitrary update to your figure's data
    # BUG FIX: dropped the no-op `i += 1` that was here — the for loop
    # rebinds `i` on every pass, so the increment had no effect.
    drawnow(makeFig)
"""Live scatter-plot demo using the drawnow package."""
import os
import sys
import ping
from pylab import arange, plt
import drawnow as drawn
import numpy as np

__author__ = "David Cotterill-Drew"
__copyright__ = "Copyright 2014, RoboTonics"
__credits__ = ["David Cotterill-Drew"]
__license__ = "GPL"
__version__ = "2.0"
__maintainer__ = __author__
__email__ = "*****@*****.**"

plt.ion()  # enable interactivity
fig = plt.figure()  # make a figure


def makeFig():
    # Redraw callback: scatter the points accumulated so far.
    plt.scatter(x, y)


x = list()
y = list()
for i in arange(1000):
    temp_y = np.random.random()
    x.append(i)
    y.append(temp_y)
    # BUG FIX: `drawn` is the drawnow *module* (imported via `import
    # drawnow as drawn`), so calling `drawn(makeFig)` raised TypeError —
    # call the module's `drawnow` function instead. Also dropped the
    # no-op `i += 1` (the for loop rebinds `i` each pass).
    drawn.drawnow(makeFig)