def save_video_frames(sensor_id, measurements, FPS=28.84):
    start_time = measurements[0].timestamp
    end_time = measurements[-1].timestamp
    cur_time = start_time

    pros = ImageProcessor()
    margin = 10

    for index, meas in enumerate(measurements):
        # Composite canvas: five 320 px wide processing images side by side,
        # with a 20 px strip at the top reserved for the timestamp text.
        comp = Image.new('RGB', (320 * 5 + margin * 4, 280), color=(255, 255, 255))

        pros.set_thermal_data(meas.data)
        local_time = get_time_str(meas.timestamp, microseconds=True, seconds=True)

        pros_imgs = pros.get_imgs()
        for img_index, img in enumerate(pros_imgs):
            comp.paste(img, (img_index * (320 + margin), 20))

        d = ImageDraw.Draw(comp)
        d.text((0, 0), local_time, fill=(0, 0, 0))

        # frame_folder is expected to be defined at module level; frames are numbered
        # with six zero-padded digits so they sort correctly on disk.
        img_name = f'{frame_folder}{sensor_id}_' + ('000000' + str(index))[-6:] + '.png'
        print(img_name)
        comp.save(img_name)
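# save_video_frames reads frame_folder as a module-level global and relies on PIL's
# Image/ImageDraw plus get_time_str being imported at the top of the file (not shown).
# The FPS argument is unused inside the function itself; the zero-padded frame names
# suggest the PNGs are stitched into a clip afterwards. One possible way to do that
# with ffmpeg, assuming it is installed; a sketch, not part of the original pipeline:
import subprocess

frame_folder = '../../data/frames/'   # assumed output path, matching the global used above

subprocess.run([
    'ffmpeg', '-framerate', '28.84',
    '-i', frame_folder + '3_%06d.png',   # frames written above for an example sensor_id of 3
    '-pix_fmt', 'yuv420p',
    frame_folder + 'sensor_3.mp4',
], check=True)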
def get_scaled_img(self, des_size):
    # Upscale the centroid plot 2x in both dimensions before converting to a PIL image.
    img = self.processor.plot_centroids(rgb=True)
    img = img.repeat(2, axis=0)
    img = img.repeat(2, axis=1)
    img = Image.fromarray(img, 'RGB')

    img_width = img.size[0]
    img_height = img.size[1]

    # Leave a 20 px strip at the top for the timestamp text.
    comp = Image.new('RGB', (img_width, img_height + 20))
    comp.paste(img, (0, 20))

    local_time = get_time_str(self.timestamp, microseconds=True, seconds=True)
    d = ImageDraw.Draw(comp)
    d.text((0, 0), local_time, fill=(255, 255, 255))

    # Image.ANTIALIAS is an alias of Image.LANCZOS and was removed in Pillow 10;
    # use Image.LANCZOS on newer Pillow versions.
    comp.thumbnail(des_size, Image.ANTIALIAS)
    return comp
def update_episodes(self):
    """
    Combines the data from all active sensors (the sensor classes, not the
    physical sensors) into one long list, sorts it by timestamp (newest first)
    and then splits the list up into episodes. An episode is defined as a set
    of measurements that all satisfy these conditions:

    - cond1: The next measurement lies within connect_time of the previous one.
      This is used to separate different bursts of measurements. Ignored if zero.
    - cond2: The time difference between the first and last measurement is
      smaller than slice_time. This is used to avoid excessively long episodes.
      Ignored if zero.

    :return: -
    """
    # Combine data from different sources and sort for easier plotting
    data = []
    for sensor in self.sensors:
        if sensor.is_active():
            data.extend(sensor.get_data())
    data = sorted(data, key=operator.attrgetter('timestamp'), reverse=True)

    # Clear UI and set up variables
    self.episodes = []
    self.episode_sensors = []
    self.ui.timeList.clear()

    if len(data) == 0:
        return

    episode_starttime = data[0].timestamp
    current_starttime = data[0].timestamp
    current_episode = []
    current_set = {data[0].sensor_id}

    # Slice the data up into episodes
    for value in data:
        if len(current_episode) == 0:
            episode_starttime = value.timestamp

        diff_connect = (current_starttime - value.timestamp).seconds
        diff_slice = (episode_starttime - value.timestamp).seconds

        if (diff_connect < self.connect_time or self.connect_time == 0) and \
                (diff_slice < self.slice_time or self.slice_time == 0):
            current_episode.append(value)
            current_set.add(value.sensor_id)
        else:
            if len(current_episode) > 0:
                self.episodes.append(current_episode[::-1])
                self.episode_sensors.append(current_set)
            current_episode = []
            current_set = set()

        current_starttime = value.timestamp

    if len(current_episode) > 0:
        self.episodes.append(current_episode[::-1])
        self.episode_sensors.append(current_set)

    # Create the string for the UI list and populate that list
    for episode in self.episodes:
        date_str = get_time_str(episode[0].timestamp, time=False)
        start_time_str = get_time_str(episode[0].timestamp, date=False)
        stop_time_str = get_time_str(episode[-1].timestamp, date=False)
        episode_str = f'{date_str} {start_time_str}->{stop_time_str}'
        self.ui.timeList.addItem(episode_str)
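# A small self-contained illustration of the episode-splitting rule above, applied to
# bare datetimes instead of measurement objects (connect_time and slice_time in seconds).
# This is a simplified sketch for clarity: unlike update_episodes it keeps the
# measurement that breaks an episode as the start of the next one.
from datetime import datetime, timedelta

def split_into_episodes(timestamps, connect_time, slice_time):
    """Group chronologically sorted timestamps into episodes using cond1 and cond2."""
    episodes, current = [], []
    for ts in timestamps:
        if not current:
            current = [ts]
            continue
        gap = (ts - current[-1]).total_seconds()      # cond1: gap to the previous measurement
        length = (ts - current[0]).total_seconds()    # cond2: total episode length so far
        if (gap < connect_time or connect_time == 0) and (length < slice_time or slice_time == 0):
            current.append(ts)
        else:
            episodes.append(current)
            current = [ts]
    if current:
        episodes.append(current)
    return episodes

start = datetime(2019, 4, 19, 12, 0, 0)
stamps = [start + timedelta(seconds=s) for s in (0, 1, 2, 30, 31, 32)]
# connect_time=10 splits on the 28 s gap: [[0, 1, 2], [30, 31, 32]] (offsets in seconds)
print([[int((t - start).total_seconds()) for t in ep] for ep in split_into_episodes(stamps, 10, 0)])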
"""
This file converts a database export (from pgAdmin) to a csv that contains 3 columns:
the centroids, epoch time and local time.
"""
import csv

from localization.processing import ImageProcessor
from help_module.time_helper import convert_to_datetime, get_time_str

pros = ImageProcessor()
data_list = []

folder_location = '../../data/'
file_name = '19042019.csv'

with open(folder_location + file_name) as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    for index, row in enumerate(reader):
        # print(row)
        # print(index)

        # Column 1 holds the raw thermal frame as a Python literal; column 3 holds the timestamp.
        thermal_data = eval(row[1])
        centroids = pros.process(thermal_data)

        meas_datetime = convert_to_datetime(row[3])
        epoch_time = meas_datetime.timestamp()
        local_time = get_time_str(meas_datetime, microseconds=True)

        data_list.append([centroids, epoch_time, local_time])

with open(folder_location + 'centroid_' + file_name, 'w+', newline='') as csvfile:
    writer = csv.writer(csvfile)
    for row in data_list:
        writer.writerow(row)
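# Hypothetical reader for the generated file, shown for completeness. csv.writer
# stringifies the centroid column, so this assumes ImageProcessor.process returns a
# plain Python list/tuple structure that round-trips through str() and
# ast.literal_eval; the file name matches the output written above.
import ast
import csv

with open('../../data/centroid_19042019.csv', newline='') as csvfile:
    for centroid_str, epoch_time, local_time in csv.reader(csvfile):
        centroids = ast.literal_eval(centroid_str)
        print(local_time, float(epoch_time), centroids)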