def reverse_3(input, original):
    original = array(original)
    # ideally, we would have a recursive function that
    # would calculate four indices based on the top left corner
    # and then calculate the rest, but I take the simple route
    noise = input - original
    # the noise is periodic: its top-left quarter repeats over the whole image
    # (integer division keeps the slice indices integral)
    smallsquare = noise[:noise.shape[0] // 2, :noise.shape[1] // 2]
    byrow = append(smallsquare, smallsquare, 0)
    bycolumnrow = append(byrow, byrow, 1)
    clean = input - bycolumnrow
    return clean
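# Hypothetical usage sketch for reverse_3 (not part of the original source). It assumes the noise
# pattern repeats with a period of half the image size in each dimension, and that numpy's
# array/append are available as bare names, as the function itself expects.
from numpy import array, append, tile, zeros

original_img = zeros((4, 4))                         # known clean reference image
noise_tile = array([[1., 2.], [3., 4.]])             # 2x2 periodic noise pattern
noisy_img = original_img + tile(noise_tile, (2, 2))  # the noise repeats over the full image
cleaned = reverse_3(noisy_img, original_img)         # recovers the clean image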
def get_data_timeline(out_path, err_path):
    # read the total run time from the "Size:" line of the stdout log
    f = open(out_path, "r")
    max_time = 0
    for line in f:
        arr = line.split(",")
        if "Size:" in arr[0]:
            # print(arr)
            if RepresentsFloat(arr[3].replace('sec=', '')):
                max_time = float(arr[3].replace('sec=', ''))
                break
    f.close()
    print(max_time)

    # build the time axis and collect one bandwidth sample per valid line of the stderr log
    f = open(err_path, "r")
    time_line = np.arange(0, int(max_time) + 2, 0.5)
    bandwith_list = []
    count = len(time_line)
    for line in f:
        arr = line.split(" ")
        if RepresentsInt(arr[0]) and count != 0:
            bandwith_list = append(bandwith_list, float(arr[8]))
            count -= 1
        if count == 0:
            break
    f.close()
    print(bandwith_list)
    return time_line, bandwith_list
def matlab2PointCorrespondences(filename):
    '''Loads and converts the point correspondences saved by the matlab camera calibration tool'''
    from numpy.lib.io import loadtxt, savetxt
    from numpy.lib.function_base import append
    points = loadtxt(filename, delimiter=',')
    savetxt(utils.removeExtension(filename) + '-point-correspondences.txt',
            append(points[:, :2].T, points[:, 3:].T, axis=0))
def add_point(self, y, x=None, z=1):
    y = float(y)
    # keep a sliding window of the last `smoothing` values and plot their mean
    self.moving_history.append(y)
    self.moving_history = self.moving_history[-self.smoothing:]
    print(self.moving_history)
    y = mean(self.moving_history)
    print(y)
    if not x:
        if z in self.line:
            x = self.line[z].get_xdata()[-1] + 1
        else:
            x = 1
    print(x)
    try:
        self.line[z].set_xdata(append(self.line[z].get_xdata(), x))
        self.line[z].set_ydata(append(self.line[z].get_ydata(), y))
    except KeyError:
        # no line with this index yet: create it
        self.line[z], = self.axes.plot([x], [y], linestyle=str(self.ls), marker=str(self.marker))
def matlab2PointCorrespondences(filename):
    """Loads and converts the point correspondences saved by the matlab camera calibration tool"""
    from numpy.lib.io import loadtxt, savetxt
    from numpy.lib.function_base import append
    points = loadtxt(filename, delimiter=",")
    savetxt(
        utils.removeExtension(filename) + "-point-correspondences.txt",
        append(points[:, :2].T, points[:, 3:].T, axis=0)
    )
def get_data_timeline(out_path, err_path):
    f = open(out_path, "r")
    max_time = 0
    for line in f:
        line = line.strip()
        arr = line.split(",")
        if "Size:" in arr[0]:
            print(arr)
            if RepresentsFloat(arr[3].replace('sec=', '')):
                max_time = float(arr[3].replace('sec=', ''))
                break
    f.close()
    print(max_time)

    f = open(err_path, "r")
    time_line = np.arange(0, int(max_time) + 2, 0.5)
    bandwith_list = []
    count = len(time_line)
    id_mem_bandwidth = 12
    id_l3_cache = 8
    for line in f:
        arr = line.split(" ")
        if RepresentsInt(arr[0]) and count != 0:
            if TEST == "MEM":
                bandwith_list = append(bandwith_list, float(arr[id_mem_bandwidth]))
            elif TEST == "L3":
                bandwith_list = append(bandwith_list, float(arr[id_l3_cache]))
            count -= 1
        if count == 0:
            break
    f.close()
    # print(count)
    print(bandwith_list)
    return time_line, bandwith_list
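# Hypothetical usage sketch (not in the original source): it assumes the RepresentsInt/RepresentsFloat
# helpers, the global TEST flag, numpy (as np) and matplotlib.pyplot (as plt) are defined elsewhere in
# the script, and that the log files follow the layout parsed above; the file names are placeholders.
# time_line, bandwith_list = get_data_timeline("membench.out", "membench.err")
# plt.plot(time_line[:len(bandwith_list)], bandwith_list)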
def projectArray(homography, points):
    from numpy.core import dot
    from numpy.lib.function_base import append
    if points.shape[0] != 2:
        raise Exception('points of dimension {0} {1}'.format(points.shape[0], points.shape[1]))
    if (homography is not None) and homography.size > 0:
        augmentedPoints = append(points, [[1] * points.shape[1]], 0)
        prod = dot(homography, augmentedPoints)
        return prod[0:2] / prod[2]
    else:
        return points
def projectArray(homography, points):
    '''Returns the coordinates of the projected points (format 2xN points) through homography'''
    from numpy.core import dot
    from numpy.core.multiarray import array
    from numpy.lib.function_base import append
    if points.shape[0] != 2:
        raise Exception('points of dimension {0} {1}'.format(points.shape[0], points.shape[1]))
    if (homography is not None) and homography.size > 0:
        augmentedPoints = append(points, [[1] * points.shape[1]], 0)
        prod = dot(homography, augmentedPoints)
        return prod[0:2] / prod[2]
    else:
        return points
def projectArray(homography, points):
    """Returns the coordinates of the projected points through homography (format: array 2xN points)"""
    from numpy.core import dot
    from numpy.core.multiarray import array
    from numpy.lib.function_base import append
    if points.shape[0] != 2:
        raise Exception("points of dimension {0} {1}".format(points.shape[0], points.shape[1]))
    if (homography is not None) and homography.size > 0:
        # alternatively, one could use cv2.convertPointsToHomogeneous and other conversions to/from homogeneous coordinates
        augmentedPoints = append(points, [[1] * points.shape[1]], 0)
        prod = dot(homography, augmentedPoints)
        return prod[0:2] / prod[2]
    else:
        return points
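# Hypothetical usage sketch for projectArray (not in the original source): a 2xN array of image
# points is mapped through a 3x3 homography; with the identity matrix the points are unchanged.
from numpy import array, eye

pts = array([[10., 20., 30.],     # x coordinates
             [40., 50., 60.]])    # y coordinates (2xN layout expected by projectArray)
H = eye(3)                        # placeholder homography; a real one would come from camera calibration
projected = projectArray(H, pts)  # 2xN result, normalized by the third homogeneous coordinate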
def modelling_cycle():
    #--------------- initialization -------------------#
    # initial_data = test_data
    initial_data = test_data_one
    # fig_init = plt.figure()
    # fig_init.canvas.manager.set_window_title('Initial data')
    # plt.plot(initial_data, color='g')

    wavelet_families = pywt.families()
    print 'Wavelet families:', ', '.join(wavelet_families)
    wavelet_family = wavelet_families[4]
    selected_wavelet = pywt.wavelist(wavelet_family)[0]
    wavelet = pywt.Wavelet(selected_wavelet)
    print 'Selected wavelet:', selected_wavelet

    max_level = pywt.swt_max_level(len(initial_data))
    # decomposition_level = max_level / 2
    decomposition_level = 3
    print 'Max level:', max_level, '\t Decomposition level:', decomposition_level

    #--------------- decomposition -------------------#
    w_initial_coefficients = pywt.swt(initial_data, wavelet, level=decomposition_level)
    w_selected_coefficiets = select_levels_from_swt(w_initial_coefficients)
    w_node_coefficients = select_node_levels_from_swt(w_initial_coefficients)  # something terribly wrong here, yet the rest works!

    #------------------ threshold --------------------#
    threshold = measure_threshold(w_initial_coefficients)

    w_threshold_coeff = w_initial_coefficients[:]
    apply_threshold(w_threshold_coeff)
    plot_initial_updated(w_initial_coefficients, w_threshold_coeff)

    # plt.figure()
    # for coeff in w_selected_coefficiets:
    #     plt.plot(coeff)
    # plt.figure()
    # for coeff in w_node_coefficients:
    #     plt.plot(coeff)
    # plt.show()

    #--------------- modification -------------------#
    r = R()

    w_new_coefficients = [0] * len(w_selected_coefficiets)
    for index in range(0, len(w_selected_coefficiets)):
        r.i_data = w_selected_coefficiets[index]
        r('hw <- HoltWinters( ts(i_data, frequency = 12), gamma = TRUE )')
        r('pred <- predict(hw, 50, prediction.interval = TRUE)')
        w_new_coefficients[index] = append(w_selected_coefficiets[index], r.pred[:, 0])

    w_new_node_coefficients = [0] * len(w_node_coefficients)
    for index in range(0, len(w_node_coefficients)):
        r.i_data = w_node_coefficients[index]
        r('hw <- HoltWinters( ts(i_data, frequency = 12), gamma = TRUE )')
        r('pred <- predict(hw, 50, prediction.interval = TRUE)')
        w_new_node_coefficients[index] = append(w_node_coefficients[index], r.pred[:, 0])

    #----
    # plt.figure()
    # for coeff in w_new_coefficients:
    #     plt.plot(coeff)
    # plt.figure()
    # for coeff in w_new_node_coefficients:
    #     plt.plot(coeff)
    # plt.show()

    #--------------- reconstruction -------------------#
    # wInitialwithUpdated_Nodes = update_node_levels_swt(w_initial_coefficients, w_new_node_coefficients)
    # plot_initial_updated(w_initial_coefficients, w_new_node_coefficients, True)
    # plot_initial_updated(w_initial_coefficients, wInitialwithUpdated_Nodes) (!)
    # plt.figure()
    # for dyad in wInitialwithUpdated_Nodes:
    #     plt.plot(dyad[0])
    #     plt.plot(dyad[1])
    #
    # plt.figure()
    # for dyad in w_initial_coefficients:
    #     plt.plot(dyad[0])
    #     plt.plot(dyad[1])
    #
    # plt.show()

    # w_updated_coefficients = update_selected_levels_swt(w_initial_coefficients, w_selected_coefficiets)
    # w_updated_coefficients = update_selected_levels_swt(w_initial_coefficients, w_new_coefficients)
    #----
    # w_updated_coefficients = update_swt(w_initial_coefficients, w_selected_coefficiets, w_node_coefficients)
    w_updated_coefficients_nodes = update_swt(w_initial_coefficients, w_new_coefficients, w_new_node_coefficients)
    w_updated_coefficients = update_selected_levels_swt(w_initial_coefficients, w_new_coefficients)

    plot_initial_updated(w_initial_coefficients, w_updated_coefficients_nodes)
    plot_initial_updated(w_initial_coefficients, w_updated_coefficients)

    reconstructed_Stationary_nodes = iswt(w_updated_coefficients_nodes, selected_wavelet)
    reconstructed_Stationary = iswt(w_updated_coefficients, selected_wavelet)

    fig_sta_r = plt.figure()
    fig_sta_r.canvas.manager.set_window_title('SWT reconstruction')
    plt.plot(reconstructed_Stationary)

    fig_sta_r_n = plt.figure()
    fig_sta_r_n.canvas.manager.set_window_title('SWT reconstruction (nodes)')
    plt.plot(reconstructed_Stationary_nodes)

    plt.show()
def __modelling_cycle():
    initial_data = test_data

    fig_init = plt.figure()
    fig_init.canvas.manager.set_window_title('Initial data')
    plt.plot(initial_data, color='g')

    #--------------- wavelet decomposition -------------------#
    decomposition_level = 2

    wavelet_families = pywt.families()
    wavelet_family = wavelet_families[0]
    selected_wavelet = pywt.wavelist(wavelet_family)[0]
    wavelet = pywt.Wavelet(selected_wavelet)  # NB: taking first variant of wavelet (e.g. haar1)

    # discrete (non stationary) multilevel decomposition
    wCoefficients_Discrete = pywt.wavedec(initial_data, wavelet, level=decomposition_level)  # NB: output length also depends on wavelet type
    # stationary (algorithme à trous ~ does not decimate coefficients at every transformation level) multilevel decomposition
    wCoefficients_Stationary = pywt.swt(initial_data, wavelet, level=decomposition_level)

    fig_discrete = plt.figure(); n_coeff = 1
    fig_discrete.canvas.manager.set_window_title('Discrete decomposition [ ' + str(decomposition_level) + ' level(s) ]')
    for coeff in wCoefficients_Discrete:
        # print coeff
        fig_discrete.add_subplot(len(wCoefficients_Discrete), 1, n_coeff); n_coeff += 1
        plt.plot(coeff)

    fig_stationary = plt.figure(); n_coeff = 1; rows = 0
    fig_stationary.canvas.manager.set_window_title('Stationary decomposition [ ' + str(decomposition_level) + ' level(s) ]')
    for item in wCoefficients_Stationary:
        rows += len(item)
    i = 0; j = 0  # tree coeffs
    for coeff in wCoefficients_Stationary:
        for subcoeff in coeff:
            print i, j
            # print subcoeff
            fig_stationary.add_subplot(rows, 1, n_coeff); n_coeff += 1
            plt.plot(subcoeff)
            j += 1
        i += 1
    plt.show()

    fig_stat_sum = plt.figure(); n_coeff = 1
    fig_stat_sum.canvas.manager.set_window_title('SWT sum by levels [ ' + str(decomposition_level) + ' level(s) ]')
    for coeff in wCoefficients_Stationary:
        sum = coeff[0] + coeff[1]
        fig_stat_sum.add_subplot(len(wCoefficients_Discrete), 1, n_coeff); n_coeff += 1
        plt.plot(sum)
    # plt.show()

    #------------------ modelling by level -------------------#
    r = R()
    r.i_data = initial_data  # or r['i_data'] = initial_data

    ### Holt-Winters ###
    # non-seasonal Holt-Winters
    print r('hw <- HoltWinters( i_data, gamma = FALSE )')
    # seasonal Holt-Winters
    r.freq = 4  # series sampling (month, days, years, etc)
    # print r( 'hw <- HoltWinters( ts ( %s, frequency = %s ) )' % ( Str4R(r.i_data), Str4R(r.freq) ) )
    # print r( 'hw <- HoltWinters( ts ( %s, frequency = %s, start = c(1,1) ) )' % ( Str4R(r.i_data), Str4R(r.freq) ) )
    # resulting sum of squared errors (SSE)
    print r.hw['SSE']

    # bruteforce frequency search
    # print 'test ahead:'
    # sse_dict = {}
    # for i in xrange(2, 50):
    #     r.freq = i
    ##     r( 'hw <- HoltWinters( ts ( %s, frequency = %s, start = c(1,1) ) )' % ( Str4R(r.i_data), Str4R(r.freq) ) )
    #     r( 'hw <- HoltWinters( ts ( %s, frequency = %s ) )' % ( Str4R(r.i_data), Str4R(r.freq) ) )
    #     print r.hw['SSE']
    #     sse_dict[r.hw['SSE']] = i; i += 1
    # print 'Resulting:'
    # m = min(sse_dict.keys())
    # print sse_dict[m], m

    fig = plt.figure()
    fig.canvas.manager.set_window_title('Holt-winters model')
    ax = fig.add_subplot(111)
    # ax.plot(r.hw['fitted'][:,0])  # the columns are: xhat, level, trend
    # plt.show()

    # forecast length
    r.steps_ahead = 50
    # print r('pred <- predict(%s, %s, prediction.interval = TRUE)' % ( Str4R(r.hw), Str4R(r.steps_ahead)) )
    # print r( 'pred <- predict(hw, %s, prediction.interval = TRUE)', Str4R(r.steps_ahead) )
    print r('pred <- predict(hw, 50, prediction.interval = TRUE)')
    # plt.plot(r.pred)
    ax.plot(initial_data)
    ax.plot(append(r.hw['fitted'][:,0], r.pred[:,0]))  # concatenating reconstructed model
    # and resulting forecast
    # plt.show()

    #------------------ reconstruction -------------------#
    # multilevel idwt
    reconstructed_Discrete = pywt.waverec(wCoefficients_Discrete, selected_wavelet)
    fig_dis_r = plt.figure()
    fig_dis_r.canvas.manager.set_window_title('DWT reconstruction')
    plt.plot(reconstructed_Discrete)
    # plt.show()

    # multilevel stationary
    reconstructed_Stationary = iswt(wCoefficients_Stationary, selected_wavelet)
    fig_sta_r = plt.figure()
    fig_sta_r.canvas.manager.set_window_title('SWT reconstruction')
    plt.plot(reconstructed_Stationary)
    plt.show()

    print 'end'
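# Side note (not part of the original source): recent PyWavelets releases ship an inverse stationary
# transform as pywt.iswt, so the swt/iswt round trip used above can be checked directly. A minimal
# sketch, assuming a signal length divisible by 2**level:
#
#   import numpy as np
#   import pywt
#   signal = np.sin(np.linspace(0, 2 * np.pi, 16))
#   coeffs = pywt.swt(signal, 'haar', level=2)  # list of (cA, cD) pairs, one per level
#   restored = pywt.iswt(coeffs, 'haar')        # reconstruction matches the input signal
#   assert np.allclose(signal, restored)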
        max_time = float(arr[3].replace('sec=', ''))
        break
f.close()
print(max_time)

f = open(res_path, "r")
time_line = np.arange(0, int(max_time + 1), 0.5)
bandwith_list = []
for line in f:
    arr = line.split(" ")
    if RepresentsInt(arr[0]):
        bandwith_list = append(bandwith_list, float(arr[8]))
f.close()

print(time_line)
print(bandwith_list)

plt.ylabel("L3 load bandwidth [MBytes/s]")
plt.xlabel("Time")
# plt.yscale('log')
plt.plot(time_line, bandwith_list, label="membench")
plt.legend()
plt.savefig('plot/plot_bandwith.png')
plt.show()
        max_time = float(arr[3].replace('sec=', ''))
        break
f.close()
print(max_time)

f = open(res_path, "r")
time_line = np.arange(0, int(max_time + 2), 0.5)
bandwith_list = []
id_mem_bandwidth = 12
id_l3_cache = 8
for line in f:
    arr = line.split(" ")
    if RepresentsInt(arr[0]):
        if TEST == "MEM":
            bandwith_list = append(bandwith_list, float(arr[id_mem_bandwidth]))
        elif TEST == "L3":
            bandwith_list = append(bandwith_list, float(arr[id_l3_cache]))
f.close()

# print(time_line)
bandwith_list = np.append(bandwith_list, np.zeros(len(time_line) - len(bandwith_list)))
print(len(time_line))
print(len(bandwith_list))

plt.ylabel(TEST + " bandwidth [MBytes/s]")
plt.xlabel("Time")
# plt.yscale('log')
plt.plot(time_line, bandwith_list, label="membench")