def get_changed_entries(self):
    """
    Find the entries in the dataset that have changed, print a human-readable
    diff for each changed field, and update the stored record via the ORM.
    """
    if self.new_data and self.all_data:
        for i in self.new_data:
            try:
                data = self.find_in_data(self.all_data, self.key, i[self.key])
            except IndexError:
                continue
            if data:
                # Compare only payload fields: skip internal ("_"-prefixed)
                # fields and the timestamp.
                data_copy = {}
                for key, value in data.iteritems():
                    if not key.startswith("_") and key != "date":
                        data_copy[key] = value
                i_copy = copy(i)
                del i_copy["date"]
                if i_copy != data_copy:
                    master = self.__master_string(self.tablename, i_copy[self.key])
                    data["date"] = i["date"]
                    for key, value in i_copy.iteritems():
                        if i[key] != data[key]:
                            changed_field = key
                            diff_string = diff(str(i[key]), str(data[key]))
                            string = self.__diff_string(
                                changed_field,
                                i[key],
                                data,
                                key,
                                diff_string,
                            )
                            data[key] = i[key]
                            master += string
                    print master
                    self.orm.update(data)
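`find_in_data` is not part of this snippet. A minimal sketch consistent with how it is called above (it receives the full dataset, a key name, and a key value, and raises `IndexError` when nothing matches) could look like the following; the list-comprehension lookup is an assumption, not the original implementation.

def find_in_data(self, all_data, key, value):
    # Hypothetical sketch: return the first stored record whose `key` field
    # equals `value`; indexing an empty list raises IndexError, which
    # get_changed_entries() catches in order to skip unknown entries.
    return [entry for entry in all_data if entry.get(key) == value][0]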
def calibrate(self):
    gdf = calibration.GyroscopeDataFile(self.csv)
    gdf.parse()

    signal_x = gdf.get_signal_x()
    signal_y = gdf.get_signal_y()
    signal_z = gdf.get_signal_z()
    timestamps = gdf.get_timestamps()

    # Smooth out the noise
    smooth_signal_x = self.gaussian_filter(signal_x)
    smooth_signal_y = self.gaussian_filter(signal_y)
    smooth_signal_z = self.gaussian_filter(signal_z)

    # g_file = open('g.txt', 'w')
    #
    # for i in range(0, len(smooth_signal_x), 1):
    #     g_file.write("%lf %lf %lf %d\n" % (smooth_signal_x.item(i), smooth_signal_y.item(i), smooth_signal_z.item(i), timestamps[i]))
    #
    # g_file.close()

    # render_trio(signal_x, signal_y, signal_z, timestamps)
    # render_trio(smooth_signal_x, smooth_signal_y, smooth_signal_z, timestamps)

    # delta_g is the difference between the raw and smoothed signals;
    # g keeps the raw signal (the subtraction is left commented out).
    g = [[], [], []]
    delta_g = [[], [], []]
    delta_g[0] = np.subtract(signal_x, smooth_signal_x).tolist()
    delta_g[1] = np.subtract(signal_y, smooth_signal_y).tolist()
    delta_g[2] = np.subtract(signal_z, smooth_signal_z).tolist()
    g[0] = signal_x  # np.subtract(signal_x, smooth_signal_x).tolist()
    g[1] = signal_y  # np.subtract(signal_y, smooth_signal_y).tolist()
    g[2] = signal_z  # np.subtract(signal_z, smooth_signal_z).tolist()

    dgt = utilities.diff(timestamps)

    theta = [[], [], []]
    delta_theta = [[], [], []]
    for component in [0, 1, 2]:
        sum_of_consecutives = np.add(g[component][:-1], g[component][1:])

        # The 2 is for trapezoidal integration; 1e9 converts nanoseconds to seconds
        dx_0 = np.divide(sum_of_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        theta[component] = [0]
        theta[component].extend(np.cumsum(num_0))

        sum_of_delta_consecutives = np.add(delta_g[component][:-1], delta_g[component][1:])
        dx_0 = np.divide(sum_of_delta_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        delta_theta[component] = [0]
        delta_theta[component].extend(np.cumsum(num_0))

    parts = self.mp4.split("/")
    pickle_file_name = parts[-1].split(".")[0]
    pickle_full_path = ".%s/%s.pickle" % ("/".join(parts[:-1]), pickle_file_name)
    print("Pickle file = %s" % pickle_full_path)

    # Cache the parsed video so repeated runs skip the expensive read_video() pass.
    import pickle
    videoObj = None
    if not os.path.exists(pickle_full_path):
        print("Pickle file not found - generating it")
        videoObj = GyroVideo(self.mp4)
        videoObj.read_video()
        fp = open(pickle_full_path, "w")
        pickle.dump(videoObj, fp)
        fp.close()
    else:
        fp = open(pickle_full_path, "r")
        videoObj = pickle.load(fp)
        fp.close()

    print "Calibrating parameters"
    print "=====================+"
    # Initial guess: [focal_length, gyro_delay, drift_x, drift_y, drift_z, shutter_duration]
    parameters = np.asarray([1000, 112859286.844093, 0.0, 0.0, 0.0, -32763211.985663])

    import scipy.optimize
    result = scipy.optimize.minimize(self.calcErrorAcrossVideoObjective, parameters,
                                     (videoObj, theta, timestamps), 'Nelder-Mead', tol=0.001)
    print result

    focal_length = result['x'][0]
    gyro_delay = result['x'][1]
    gyro_drift = (result['x'][2], result['x'][3], result['x'][4])
    shutter_duration = result['x'][5]

    print "Focal length = %f" % focal_length
    print "Gyro delay = %f" % gyro_delay
    print "Gyro drift = (%f, %f, %f)" % gyro_drift
    print "Shutter duration= %f" % shutter_duration

    # Smooth out the delta_theta values - they must be fluctuating like crazy
    smooth_delta_x = self.gaussian_filter(delta_theta[0], 128, 16)
    smooth_delta_y = self.gaussian_filter(delta_theta[1], 128, 16)
    smooth_delta_z = self.gaussian_filter(delta_theta[2], 128, 16)

    return (delta_theta, timestamps, focal_length, gyro_delay, gyro_drift, shutter_duration)
def inputs(self):
    gdf = calibration.GyroscopeDataFile(self.csv)
    gdf.parse()

    signal_x = gdf.get_signal_x()
    signal_y = gdf.get_signal_y()
    signal_z = gdf.get_signal_z()
    timestamps = gdf.get_timestamps()

    # Smooth out the noise
    smooth_signal_x = self.gaussian_filter(signal_x)
    smooth_signal_y = self.gaussian_filter(signal_y)
    smooth_signal_z = self.gaussian_filter(signal_z)

    # g_file = open('g.txt', 'w')
    #
    # for i in range(0, len(smooth_signal_x), 1):
    #     g_file.write("%lf %lf %lf %d\n" % (smooth_signal_x.item(i), smooth_signal_y.item(i), smooth_signal_z.item(i), timestamps[i]))
    #
    # g_file.close()

    render_trio(signal_x, signal_y, signal_z, timestamps)
    render_trio(smooth_signal_x, smooth_signal_y, smooth_signal_z, timestamps)

    # delta_g is the difference between the raw and smoothed signals;
    # g keeps the raw signal (the subtraction is left commented out).
    g = [[], [], []]
    delta_g = [[], [], []]
    delta_g[0] = np.subtract(signal_x, smooth_signal_x).tolist()
    delta_g[1] = np.subtract(signal_y, smooth_signal_y).tolist()
    delta_g[2] = np.subtract(signal_z, smooth_signal_z).tolist()
    g[0] = signal_x  # np.subtract(signal_x, smooth_signal_x).tolist()
    g[1] = signal_y  # np.subtract(signal_y, smooth_signal_y).tolist()
    g[2] = signal_z  # np.subtract(signal_z, smooth_signal_z).tolist()

    dgt = utilities.diff(timestamps)

    theta = [[], [], []]
    delta_theta = [[], [], []]
    for component in [0, 1, 2]:
        sum_of_consecutives = np.add(g[component][:-1], g[component][1:])

        # The 2 is for trapezoidal integration; 1e9 converts nanoseconds to seconds
        dx_0 = np.divide(sum_of_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        theta[component] = [0]
        theta[component].extend(np.cumsum(num_0))

        sum_of_delta_consecutives = np.add(delta_g[component][:-1], delta_g[component][1:])
        dx_0 = np.divide(sum_of_delta_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        delta_theta[component] = [0]
        delta_theta[component].extend(np.cumsum(num_0))

    # UNKNOWNS
    focal_length = 1080.0
    gyro_delay = 0
    gyro_drift = (0, 0, 0)
    shutter_duration = 0

    parts = self.mp4.split("/")
    pickle_file_name = parts[-1].split(".")[0]
    pickle_full_path = ".%s/%s.pickle" % ("/".join(parts[:-1]), pickle_file_name)
    print("Pickle file = %s" % pickle_full_path)

    # Cache the parsed video so repeated runs skip the expensive read_video() pass.
    import pickle
    videoObj = None
    if not os.path.exists(pickle_full_path):
        print("Pickle file not found - generating it")
        videoObj = GyroVideo(self.mp4)
        videoObj.read_video()
        fp = open(pickle_full_path, "w")
        pickle.dump(videoObj, fp)
        fp.close()
    else:
        fp = open(pickle_full_path, "r")
        videoObj = pickle.load(fp)
        fp.close()

    return (delta_theta, timestamps)
def calibrate(self):
    gdf = self.__import_gyro_data()

    signal_x = gdf.get_signal_x()
    signal_y = gdf.get_signal_y()
    signal_z = gdf.get_signal_z()
    timestamps = gdf.get_timestamps()

    # Smooth out the noise
    smooth_signal_x = self.gaussian_filter(signal_x)
    smooth_signal_y = self.gaussian_filter(signal_y)
    smooth_signal_z = self.gaussian_filter(signal_z)

    if DEBUG_PLOT:
        self.render_trio(signal_x, signal_y, signal_z, timestamps)
        self.render_trio(smooth_signal_x, smooth_signal_y, smooth_signal_z, timestamps)

    # delta_g is the difference between the raw and smoothed signals;
    # g keeps the raw signal (the subtraction is left commented out).
    g = [[], [], []]
    delta_g = [[], [], []]
    delta_g[0] = np.subtract(signal_x, smooth_signal_x).tolist()
    delta_g[1] = np.subtract(signal_y, smooth_signal_y).tolist()
    delta_g[2] = np.subtract(signal_z, smooth_signal_z).tolist()
    g[0] = signal_x  # np.subtract(signal_x, smooth_signal_x).tolist()
    g[1] = signal_y  # np.subtract(signal_y, smooth_signal_y).tolist()
    g[2] = signal_z  # np.subtract(signal_z, smooth_signal_z).tolist()

    dgt = utilities.diff(timestamps)  # dgt is the interval between consecutive timestamps

    theta = [[], [], []]
    delta_theta = [[], [], []]
    for component in [0, 1, 2]:
        # Sum each pair of consecutive samples; used to calibrate the zero drift.
        sum_of_consecutives = np.add(g[component][:-1], g[component][1:])

        # The 2 is for trapezoidal integration; 1e9 converts nanoseconds to seconds
        dx_0 = np.divide(sum_of_consecutives, 2 * 1000000000)  # rate drift over each time segment
        num_0 = np.multiply(dx_0, dgt)
        theta[component] = [0]
        theta[component].extend(np.cumsum(num_0))

        sum_of_delta_consecutives = np.add(
            delta_g[component][:-1], delta_g[component][1:]
        )
        dx_0 = np.divide(sum_of_delta_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        delta_theta[component] = [0]
        # Likewise, accumulate the drift of delta_theta.
        delta_theta[component].extend(np.cumsum(num_0))

    # UNKNOWNS
    pixel_size = 2.9  # 2.9 um
    focus_efficient = 3.2  # 21 mm
    # Presumably: focal length in pixels = focal length (mm) / pixel pitch (mm),
    # i.e. 3.2 / 0.0029 ≈ 1103 px.
    focus_in_pixel = focus_efficient / (pixel_size / 1000)
    focal_length = focus_in_pixel
    gyro_delay = 0
    gyro_drift = (0, 0, 0)
    shutter_duration = 0

    # parts = self.mp4.split("/")
    # pickle_file_name = parts[-1].split(".")[0]
    # pickle_full_path = "%s/%s.pickle" % ("/".join(parts[:-1]), pickle_file_name)
    # print("Pickle file = %s" % pickle_full_path)
    pickle_full_path = "./videodata.data"

    videoObj = self.__read_video()

    print("Calibrating parameters")
    print("=====================+")
    parameters = np.asarray([focal_length, gyro_delay,
                             gyro_drift[0], gyro_drift[1], gyro_drift[2],
                             shutter_duration])

    result = scipy.optimize.minimize(
        self.calcErrorAcrossVideoObjective,
        parameters,
        (videoObj, theta, timestamps),
        "Nelder-Mead",
        tol=0.001,
    )
    print(result)

    focal_length = result["x"][0]
    gyro_delay = result["x"][1]
    gyro_drift = (result["x"][2], result["x"][3], result["x"][4])
    shutter_duration = result["x"][5]

    print("Focal length = %f" % focal_length)
    print("Gyro delay = %f" % gyro_delay)
    print("Gyro drift = (%f, %f, %f)" % gyro_drift)
    print("Shutter duration= %f" % shutter_duration)

    # Smooth out the delta_theta values - they must be fluctuating like crazy
    smooth_delta_x = self.gaussian_filter(delta_theta[0], 128, 16)
    smooth_delta_y = self.gaussian_filter(delta_theta[1], 128, 16)
    smooth_delta_z = self.gaussian_filter(delta_theta[2], 128, 16)

    return (
        delta_theta,
        timestamps,
        focal_length,
        gyro_delay,
        gyro_drift,
        shutter_duration,
    )
def calibrate(self):
    gdf = calibration.GyroscopeDataFile(self.csv)
    gdf.parse()

    signal_x = gdf.get_signal_x()
    signal_y = gdf.get_signal_y()
    signal_z = gdf.get_signal_z()
    timestamps = gdf.get_timestamps()

    # Smooth out the noise
    smooth_signal_x = self.gaussian_filter(signal_x)
    smooth_signal_y = self.gaussian_filter(signal_y)
    smooth_signal_z = self.gaussian_filter(signal_z)

    render_trio(signal_x, signal_y, signal_z, timestamps)
    render_trio(smooth_signal_x, smooth_signal_y, smooth_signal_z, timestamps)

    # delta_g is the difference between the raw and smoothed signals;
    # g keeps the raw signal (the subtraction is left commented out).
    g = [[], [], []]
    delta_g = [[], [], []]
    delta_g[0] = np.subtract(signal_x, smooth_signal_x).tolist()
    delta_g[1] = np.subtract(signal_y, smooth_signal_y).tolist()
    delta_g[2] = np.subtract(signal_z, smooth_signal_z).tolist()
    g[0] = signal_x  # np.subtract(signal_x, smooth_signal_x).tolist()
    g[1] = signal_y  # np.subtract(signal_y, smooth_signal_y).tolist()
    g[2] = signal_z  # np.subtract(signal_z, smooth_signal_z).tolist()

    dgt = utilities.diff(timestamps)

    theta = [[], [], []]
    delta_theta = [[], [], []]
    for component in [0, 1, 2]:
        sum_of_consecutives = np.add(g[component][:-1], g[component][1:])

        # The 2 is for trapezoidal integration; 1e9 converts nanoseconds to seconds
        dx_0 = np.divide(sum_of_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        theta[component] = [0]
        theta[component].extend(np.cumsum(num_0))

        sum_of_delta_consecutives = np.add(delta_g[component][:-1], delta_g[component][1:])
        dx_0 = np.divide(sum_of_delta_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        delta_theta[component] = [0]
        delta_theta[component].extend(np.cumsum(num_0))

    # UNKNOWNS
    focal_length = 1080.0
    gyro_delay = 0
    gyro_drift = (0, 0, 0)
    shutter_duration = 0

    parts = self.mp4.split("/")
    pickle_file_name = parts[-1].split(".")[0]
    pickle_full_path = "%s/%s.pickle" % ("/".join(parts[:-1]), pickle_file_name)
    print("Pickle file = %s" % pickle_full_path)

    # Cache the parsed video so repeated runs skip the expensive read_video() pass.
    import pickle
    videoObj = None
    if not os.path.exists(pickle_full_path):
        print("Pickle file not found - generating it")
        videoObj = GyroVideo(self.mp4)
        videoObj.read_video()
        fp = open(pickle_full_path, "w")
        pickle.dump(videoObj, fp)
        fp.close()
    else:
        fp = open(pickle_full_path, "r")
        videoObj = pickle.load(fp)
        fp.close()

    print "Calibrating parameters"
    print "=====================+"
    # Initial guess: [focal_length, gyro_delay, drift_x, drift_y, drift_z, shutter_duration]
    parameters = np.asarray([1080.0, 0.0, 0.0, 0.0, 0.0, 0.0])

    import scipy.optimize
    result = scipy.optimize.minimize(self.calcErrorAcrossVideoObjective, parameters,
                                     (videoObj, theta, timestamps), 'Nelder-Mead', tol=0.001)
    print result

    focal_length = result['x'][0]
    gyro_delay = result['x'][1]
    gyro_drift = (result['x'][2], result['x'][3], result['x'][4])
    shutter_duration = result['x'][5]

    print "Focal length = %f" % focal_length
    print "Gyro delay = %f" % gyro_delay
    print "Gyro drift = (%f, %f, %f)" % gyro_drift
    print "Shutter duration= %f" % shutter_duration

    # Smooth out the delta_theta values - they must be fluctuating like crazy
    smooth_delta_x = self.gaussian_filter(delta_theta[0], 128, 16)
    smooth_delta_y = self.gaussian_filter(delta_theta[1], 128, 16)
    smooth_delta_z = self.gaussian_filter(delta_theta[2], 128, 16)

    return (delta_theta, timestamps, focal_length, gyro_delay, gyro_drift, shutter_duration)
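The integration loop shared by the calibrate()/inputs() variants above is a trapezoidal rule over nanosecond timestamps: each pair of consecutive angular-rate samples is averaged, scaled from nanoseconds to seconds, multiplied by the sample interval, and accumulated into an angle. A standalone sketch of that step, with illustrative names that are not part of the original code:

import numpy as np

def integrate_rate(samples, timestamps_ns):
    """Trapezoidal integration of angular-rate samples over nanosecond timestamps.

    Hypothetical helper equivalent to one pass of the loop in calibrate()/inputs().
    """
    samples = np.asarray(samples, dtype=float)
    dt = np.diff(np.asarray(timestamps_ns, dtype=float))  # interval per step, in ns
    avg = (samples[:-1] + samples[1:]) / 2.0              # trapezoid: mean of consecutive samples
    increments = avg * dt / 1e9                           # convert ns -> s while integrating
    return np.concatenate(([0.0], np.cumsum(increments)))

# e.g. theta_x = integrate_rate(signal_x, timestamps)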
def run(self, n, weights, step=100, threshold=1e-4, runstore=None, progress=True):
    """
    Simulate the next `n` generations.

    Abort if the average overall difference between consecutive generations
    is smaller than `threshold` (i.e. an equilibrium has been reached).

    Args:
        n: int
            maximum number of generations to run
        weights: dict
            weights to be used in the calculation of the next
            generation frequencies
        step: int
            frequencies are stored every `step` generations
        threshold: float
            the `threshold` is divided by the frequency size (arr.ndim)
            to calculate `thresh`, and the simulation run is stopped if
            the average difference between consecutive generations has
            become smaller than `thresh`
        runstore: storage.runstore instance
            if provided, the simulation run is stored in the datafile
        progress: progressbar.ProgressBar instance
            if none is provided, a new one is created
    """
    MIG = weights['migration']
    VIAB_SEL = weights['viability_selection']
    REPRO_CONST = weights['constant_reproduction']
    dyn_repro_weights = weights['dynamic_reproduction']
    pt = dyn_repro_weights[0][0].pt
    #~ SR,TP = weights['dynamic_reproduction']
    #~ pt = SR.pt

    self.runstore = runstore
    n += self.generation
    # On average, each frequency should change by less than `thresh`
    # once an equilibrium has been reached.
    thresh = threshold / self.size
    still_changing = True

    if isinstance(progress, (pbar.ProgressBar, DummyProgressBar)):
        pass  # reuse progressbar
    elif progress is False:
        progress = generate_progressbar('dummy')
    elif progress is True:
        progress = generate_progressbar('real')
        #~ widgets=['Generation: ', pbar.Counter(), ' (', pbar.Timer(), ')']
        #~ progress = pbar.ProgressBar(widgets=widgets, maxval=1e6).start()   # don't provide maxval! , fd=sys.stdout
    else:
        raise TypeError("`progress` must be True, False, or an existing progressbar instance")

    while still_changing and self.generation < n:
        # data storage:
        if self.runstore is not None:
            if self.generation % step == 0:
                #~ self.runstore.dump_data(self.generation, self.freqs, self.all_sums())
                self.runstore.dump_data(self)
                #~ self.runstore.flush()

        previous = np.copy(self.freqs)

        ### migration ##################################
        self.freqs = np.sum(self.freqs[np.newaxis, ...] * MIG, 1)  # sum over `source` axis
        self.normalize()

        ### viability selection ########################
        self.freqs = self.freqs * VIAB_SEL
        self.normalize()

        ### reproduction ###############################
        #~ # species recognition:
        #~ SR.calculate( self.get_sums(['backA','backB']) )
        #~
        #~ # trait preferences:
        #~ TP.calculate( self.get_sums('trait') )

        REPRO_DYN = 1.  # np.ones( (1,)*self.repro_dim )
        for DRW, target_loci in dyn_repro_weights:
            DRW.calculate(self.get_sums(target_loci))
            REPRO_DYN = REPRO_DYN * DRW.extended()

        # offspring production:
        females = extend(self.freqs, self.repro_dim, self.repro_idxs['female'])
        males = extend(self.freqs, self.repro_dim, self.repro_idxs['male'])
        #~ self.freqs = sum_along_axes( females * males * R * SR.extended() * TP.extended(), self.repro_idxs['offspring'] )
        self.freqs = sum_along_axes(
            females * males * REPRO_CONST * REPRO_DYN,
            self.repro_idxs['offspring'],
        )
        self.normalize()

        self.generation += 1
        progress.update(self.generation)
        still_changing = utils.diff(self.freqs, previous) > thresh

    self.eq = not still_changing

    if self.runstore is not None:
        # store final state
        self.runstore.dump_data(self)
        if self.eq:
            state_desc = 'eq'
        else:
            state_desc = 'max'
        self.runstore.record_special_state(self.generation, state_desc)

    # return the ProgressBar instance so we can reuse it for further running:
    return progress
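A hypothetical call of this method, assuming `sim` is an instance of the owning class and the weight arrays (MIG, VIAB_SEL, REPRO_CONST, DRW, target_loci) have been built elsewhere; only the dictionary keys, the `eq`/`generation` attributes, and the returned progressbar are taken from the code above:

# Hypothetical usage sketch; `sim` and the weight arrays are placeholders.
weights = {
    'migration': MIG,                              # migration weights
    'viability_selection': VIAB_SEL,               # viability-selection weights
    'constant_reproduction': REPRO_CONST,          # constant reproduction weights
    'dynamic_reproduction': [(DRW, target_loci)],  # list of (weight, target_loci) pairs
}
progress = sim.run(1000, weights, step=50, threshold=1e-4)
if sim.eq:
    print 'equilibrium reached at generation %d' % sim.generation
# the returned progressbar can be reused for a follow-up run:
progress = sim.run(500, weights, progress=progress)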
def test_diff(self):
    output = utilities.diff([0, 1, 2, 3, 4])
    self.assertEqual(output.tolist(), [1, 1, 1, 1])
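The test pins down the behaviour of `utilities.diff`: it returns the consecutive differences of its input as a NumPy array (the `.tolist()` call implies an array result). A minimal implementation consistent with this test, offered as a sketch rather than the project's actual code:

import numpy as np

def diff(values):
    # Consecutive differences: output[i] = values[i + 1] - values[i].
    # np.diff returns an ndarray, which matches the .tolist() call in the test.
    return np.diff(np.asarray(values))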
def parse_inputs(csv, video_name):
    gdf = calibration.GyroscopeDataFile(csv)
    gdf.parse()

    signal_x = gdf.get_signal_x()
    signal_y = gdf.get_signal_y()
    signal_z = gdf.get_signal_z()
    timestamps = gdf.get_timestamps()

    matlab_gyro_file_path = "%s.txt" % video_name
    if not os.path.exists(matlab_gyro_file_path):
        print("Matlab gyro file not found - generating it")
        matlab_gyro_file = open(matlab_gyro_file_path, 'w')
        matlab_gyro_file.write("size: [%d 4]\n" % len(signal_x))
        for i in range(len(signal_x)):
            matlab_gyro_file.write(str(signal_x[i]) + " " + str(signal_y[i]) + " " + str(signal_z[i]) + " " + str(timestamps[i]) + "\n")

        timestamp_list = []
        vidcap = cv2.VideoCapture("%s.mp4" % video_name)
        success, frame = vidcap.read()
        while success:
            # vidcap.get(0) is the frame position in milliseconds; convert to nanoseconds
            timestamp = vidcap.get(0) * 1000 * 1000
            # print "%d %s" % (timestamp_count, timestamp)
            timestamp_list.append(timestamp)
            success, frame = vidcap.read()
        # print("**** %d ****" % timestamp_count)

        matlab_gyro_file.write("size: [%d 1]\n" % len(timestamp_list))
        for t in timestamp_list:
            matlab_gyro_file.write("%s\n" % str(t))
        matlab_gyro_file.close()

    # Smooth out the noise
    smooth_signal_x = gaussian_filter(signal_x)
    smooth_signal_y = gaussian_filter(signal_y)
    smooth_signal_z = gaussian_filter(signal_z)

    render_trio(signal_x, signal_y, signal_z, timestamps)
    render_trio(smooth_signal_x, smooth_signal_y, smooth_signal_z, timestamps)

    # delta_g is the difference between the raw and smoothed signals;
    # g keeps the raw signal (the subtraction is left commented out).
    g = [[], [], []]
    delta_g = [[], [], []]
    delta_g[0] = np.subtract(signal_x, smooth_signal_x).tolist()
    delta_g[1] = np.subtract(signal_y, smooth_signal_y).tolist()
    delta_g[2] = np.subtract(signal_z, smooth_signal_z).tolist()
    g[0] = signal_x  # np.subtract(signal_x, smooth_signal_x).tolist()
    g[1] = signal_y  # np.subtract(signal_y, smooth_signal_y).tolist()
    g[2] = signal_z  # np.subtract(signal_z, smooth_signal_z).tolist()

    dgt = utilities.diff(timestamps)

    theta = [[], [], []]
    delta_theta = [[], [], []]
    for component in [0, 1, 2]:
        sum_of_consecutives = np.add(g[component][:-1], g[component][1:])

        # The 2 is for trapezoidal integration; 1e9 converts nanoseconds to seconds
        dx_0 = np.divide(sum_of_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        theta[component] = [0]
        theta[component].extend(np.cumsum(num_0))

        sum_of_delta_consecutives = np.add(delta_g[component][:-1], delta_g[component][1:])
        dx_0 = np.divide(sum_of_delta_consecutives, 2 * 1000000000)
        num_0 = np.multiply(dx_0, dgt)
        delta_theta[component] = [0]
        delta_theta[component].extend(np.cumsum(num_0))

    pickle_full_path = "%s.pickle" % video_name
    print("Pickle file = %s" % pickle_full_path)

    # Cache the parsed video so repeated runs skip the expensive read_video() pass.
    videoObj = None
    if not os.path.exists(pickle_full_path):
        print("Pickle file not found - generating it")
        videoObj = GyroVideo("%s.mp4" % video_name)
        videoObj.read_video()
        fp = open(pickle_full_path, "w")
        pickle.dump(videoObj, fp)
        fp.close()
    else:
        fp = open(pickle_full_path, "r")
        videoObj = pickle.load(fp)
        fp.close()

    parameters = (videoObj, theta, timestamps)
    return parameters