def softmax_smooth2(self, arr_2d, steep):
    # arr_2d: 2D numpy array with the newest frame as the first row
    # steep: close to 0 means taking the average; a high steep means only looking at new data
    logging.debug("SMOOTHING TIME!")

    # sum_1_n(e^(-steep*n) * x_n) / sum_1_n(e^(-steep*n))
    time_now = time_hns()
    n_array = np.arange(1, arr_2d.shape[0] + 1)
    numerator = np.apply_along_axis(self.softmax_numerator, 0, arr_2d, n_array, steep)
    denominator = np.sum(np.exp(-steep * n_array))
    smooth_array = numerator / denominator

    logging.debug("\t\tTIME SMOOTH: smooth 2d array: {}".format(
        (time_hns() - time_now) / 1000000))

    return smooth_array
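# Illustrative sketch (not part of the original class): the same exponential
# weighting as softmax_smooth2, written as a free function over a plain 2D array
# whose rows are ordered newest-first. The function name below is hypothetical;
# it assumes numpy is imported as np, as elsewhere in this module.
def _softmax_smooth_sketch(arr_2d, steep):
    """Column-wise weighted average: sum_n(e^(-steep*n) * x_n) / sum_n(e^(-steep*n))."""
    weights = np.exp(-steep * np.arange(1, arr_2d.shape[0] + 1))  # newest row weighs most
    return (weights[:, None] * arr_2d).sum(axis=0) / weights.sum()

# With steep close to 0 every row weighs (almost) the same (plain average); with a
# large steep the newest row dominates:
#   _softmax_smooth_sketch(np.array([[1.0], [0.0], [0.0]]), steep=0.01)  # ~[0.337]
#   _softmax_smooth_sketch(np.array([[1.0], [0.0], [0.0]]), steep=5.0)   # ~[0.993]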
def set_msg(self, frame_tracker):
    # get a single frame
    row = self.df_csv.loc[frame_tracker]

    # init a message dict
    self.msg = dict()

    # get tracking confidence if it exists, otherwise assume full confidence
    if 'confidence' in self.df_csv:
        self.msg['confidence'] = row['confidence']
    else:
        self.msg['confidence'] = 1.0

    # metadata in message
    self.msg['frame'] = int(row['frame'])
    self.msg['timestamp'] = row['timestamp']
    if not self.smooth:
        self.msg['smooth'] = False

    # only add tracking data when confidence is high enough
    if self.msg['confidence'] >= .7:
        # AU regression values in message
        self.msg['au_r'] = self.df_au.loc[frame_tracker].to_dict()

        # eye gaze in message as AU; angles are in radians
        # (eyes move about 60 degrees, which is 1.0472 rad, so no conversion needed)
        eye_angle = self.df_eye_gaze.loc[frame_tracker].get(
            ["gaze_angle_x", "gaze_angle_y"]).values
        self.msg['gaze'] = {}
        self.msg['gaze']['gaze_angle_x'] = eye_angle[0]
        self.msg['gaze']['gaze_angle_y'] = eye_angle[1]

        # head pose in message
        self.msg['pose'] = self.df_head_pose.loc[frame_tracker].to_dict()

    # logging purpose; time taken from message publish to animation
    self.msg['timestamp_utc'] = time_hns()
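# Example of the message built above (a sketch: the keys mirror what set_msg sets,
# the values are made up; 'smooth': False is only added when smoothing is disabled,
# and the AU column names depend on the OpenFace CSV):
_EXAMPLE_MSG = {
    'frame': 1,
    'timestamp': 0.033,
    'confidence': 0.98,
    'au_r': {'AU01_r': 0.4, 'AU02_r': 0.0},
    'gaze': {'gaze_angle_x': 0.10, 'gaze_angle_y': -0.05},
    'pose': {'pose_Rx': 0.02, 'pose_Ry': -0.01, 'pose_Rz': 0.00},
    'timestamp_utc': 0,  # placeholder for time_hns()
}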
async def pub_sub_function(self, apply_function):
    """Subscribes to FACS data, smooths it, and publishes the result."""
    new_smooth_object = False

    # await messages
    logging.info("Awaiting FACS data...")
    # without try statement, no error output
    try:
        # keep listening to all published messages on topic 'facs'
        while True:
            key, timestamp, data = await self.sub_socket.sub()
            logging.debug("Received message: {}".format([key, timestamp, data]))

            # check not finished; timestamp is empty (b'') when done
            if timestamp:
                # measure time
                time_begin = time_hns()
                time_now = time_begin

                # only pass on messages with enough tracking confidence; always send when no confidence param
                if 'confidence' not in data or data['confidence'] >= 0.7:
                    # don't smooth data with 'smooth' == False
                    if 'smooth' not in data or data['smooth']:
                        # if topic changed, instantiate a new SmoothData object
                        if key not in self.smooth_obj_dict:
                            self.smooth_obj_dict[key] = SmoothData()
                            new_smooth_object = True

                        logging.debug("TIME: smooth class init: {}".format(
                            (time_hns() - time_now) / 1000000))
                        time_now = time_hns()

                        # check AU dict is in data and not empty
                        if "au_r" in data and data['au_r']:
                            # sort dict; dicts keep insertion order (Python 3.6+)
                            data['au_r'] = dict(
                                sorted(data['au_r'].items(), key=lambda k: k[0]))

                            # match number of multiplier columns
                            if new_smooth_object:
                                self.smooth_obj_dict[key].set_new_multiplier(
                                    len(data['au_r']))
                                new_smooth_object = False

                            # smooth facial expressions;
                            # window_size: number of past data points; steep: weight of newer data
                            data['au_r'] = getattr(self.smooth_obj_dict[key], apply_function)(
                                data['au_r'], queue_no=0, window_size=3, steep=.25)

                            logging.debug("TIME: Smooth AU: {}".format(
                                (time_hns() - time_now) / 1000000))
                            time_now = time_hns()

                        # check head rotation dict is in data and not empty
                        if "pose" in data and data['pose']:
                            # smooth head pose
                            data['pose'] = getattr(self.smooth_obj_dict[key], apply_function)(
                                data['pose'], queue_no=1, window_size=6, steep=.15)

                            logging.debug("TIME: Smooth head pose: {}".format(
                                (time_hns() - time_now) / 1000000))

                    else:
                        logging.debug("No smoothing applied, forwarding unchanged")
                        # remove topic from dict when msgs finish
                        removed_topic = self.smooth_obj_dict.pop(key, None)
                        logging.debug("Removing topic from smooth_obj_dict: {}".format(
                            removed_topic))

                    # convert gaze into AU 61, 62, 63, 64
                    if "gaze" in data:
                        data['au_r'] = self.gaze_to_au(data['au_r'], data['gaze'])
                        # remove gaze from message after AU conversion
                        data.pop('gaze')

                    # send modified message
                    logging.debug("TIME: Smoothed data: {}".format(data))
                    logging.info(data)
                    await self.pub_socket.pub(data, key)

            # send message that we're done
            else:
                print("No more messages to pass; finished")
                await self.pub_socket.pub(b'', key)

            logging.debug("TIME: Total bridge: {}".format(
                (time_hns() - time_begin) / 1000000))

    except Exception:
        print("Error with sub")
        logging.error(traceback.format_exc())
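# Minimal sketch of the getattr dispatch used above (SmoothData, set_new_multiplier
# and the two trailing_moving_average* methods come from this module; the function
# name, AU keys and values below are made up for illustration):
def _dispatch_sketch():
    smoother = SmoothData()
    smoother.set_new_multiplier(2)  # one multiplier per AU column
    apply_function = "trailing_moving_average2"  # or "trailing_moving_average"
    au_r = {'AU01_r': 0.4, 'AU02_r': 0.0}
    return getattr(smoother, apply_function)(au_r, queue_no=0, window_size=3, steep=.25)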
def trailing_moving_average(self, data_dict, queue_no, window_size=3, steep=1):
    # data_dict: dict containing the data to smooth
    # queue_no: which data history should be used
    # window_size: how many previous dicts should be remembered

    # measure time
    time_now = time_hns()

    # no smoothing
    if window_size <= 1:
        return data_dict

    else:
        # create a new queue to store a new type of data when no queue exists yet
        if len(self.dataframe_list) <= queue_no:
            logging.debug(data_dict)
            # use labels as column names; switch index with columns
            d_frame = pd.DataFrame.from_dict(data_dict, orient='index')
            d_frame = d_frame.transpose()
            logging.debug(d_frame)

            logging.debug("Add new queue")
            self.dataframe_list.append(d_frame)

            logging.debug("TIME SMOOTH: Dict to pd dataframe: {}".format(
                (time_hns() - time_now) / 1000000))

            # no history to smooth against yet; return data unchanged
            return data_dict

        # use the [window_size] previous data dicts for the moving average
        else:
            # transform dict to series
            d_series = pd.Series(data_dict)
            logging.debug("TIME SMOOTH: Dict to pd series: {}".format(
                (time_hns() - time_now) / 1000000))
            time_now = time_hns()

            # get data frame
            d_frame = self.dataframe_list[queue_no]

            # add data series to data frame at first position
            d_frame.loc[-1] = d_series  # adding a row
            d_frame.index = d_frame.index + 1  # shifting index
            d_frame = d_frame.sort_index()  # sorting by index

            # when row count exceeds window_size, drop last row (oldest frame)
            if d_frame.shape[0] > window_size:
                d_frame.drop(d_frame.tail(1).index, inplace=True)

            # put our data frame back for next time
            self.dataframe_list[queue_no] = d_frame

            logging.debug("TIME SMOOTH: Insert pd series into dataframe: {}".format(
                (time_hns() - time_now) / 1000000))
            time_now = time_hns()

            # use softmax-like function to smooth each column
            smooth_data = d_frame.apply(self.softmax_smooth, args=(steep,))

            logging.debug("TIME SMOOTH: Smooth data: {}".format(
                (time_hns() - time_now) / 1000000))
            time_now = time_hns()

            # apply AU multiplier
            if queue_no == 0:
                smooth_data = smooth_data * self.multiplier
                logging.debug("TIME SMOOTH: Multiplier: {}".format(
                    (time_hns() - time_now) / 1000000))

            return smooth_data.to_dict()
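# Sketch of the window bookkeeping above, as a free function (hypothetical name;
# assumes pandas is imported as pd, as elsewhere in this module): the newest frame
# is inserted at index 0 and the DataFrame is trimmed back to window_size rows.
def _push_frame_sketch(d_frame, data_dict, window_size=3):
    d_frame.loc[-1] = pd.Series(data_dict)  # append newest frame with index -1
    d_frame.index = d_frame.index + 1       # shift so the new row gets index 0
    d_frame = d_frame.sort_index()          # newest row first
    if d_frame.shape[0] > window_size:
        d_frame = d_frame.drop(d_frame.tail(1).index)  # drop the oldest frame
    return d_frame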
def trailing_moving_average2(self, data_dict, queue_no, window_size=3, steep=1):
    # data_dict: dict containing the data to smooth
    # queue_no: which data history should be used
    # window_size: how many previous data points should be remembered

    # measure time
    time_now = time_hns()

    # no smoothing
    if window_size <= 1:
        return data_dict

    else:
        logging.debug("Window size bigger than 1")
        logging.debug(f"len data_dict: {len(data_dict)}")

        # dict values into a 1-row 2D array
        array = np.fromiter(data_dict.values(), dtype=float, count=len(data_dict))
        array = np.reshape(array, (-1, len(array)))

        logging.debug("\t\tTIME SMOOTH: dict to array: {}".format(
            (time_hns() - time_now) / 1000000))
        time_now = time_hns()

        # create a new queue to store a new type of data when no queue exists yet
        if len(self.data_list) <= queue_no:
            logging.debug("smooth 2d array")
            self.data_list.append(array)
            # TODO calculate the denominator once here

            # change AU intensity with GUI multiplier
            if queue_no == 0:
                array = array * self.multiplier

            # no history to smooth against yet; values back into dict
            data_dict = dict(zip(data_dict.keys(), array.flatten()))
            return data_dict

        # queue exists; smooth over the stored window
        else:
            logging.debug("queue exists")
            smooth_2d_array = self.data_list[queue_no]
            logging.debug("smooth_2d_array shape: {}".format(smooth_2d_array.shape))

            # matrix not yet at window size
            if self.data_list[queue_no].shape[0] <= window_size:
                # add new array on top
                smooth_2d_array = np.concatenate((array, smooth_2d_array), axis=0)
            else:
                # faster than dropping the last row and concatenating:
                # replace the oldest (lowest) row and roll the new row to the top
                smooth_2d_array[-1] = array
                smooth_2d_array = np.roll(smooth_2d_array, 1, axis=0)

            logging.debug("stacked")
            logging.debug(smooth_2d_array)

            # store for next message
            self.data_list[queue_no] = smooth_2d_array

            logging.debug("\t\tTIME SMOOTH: stack array: {}".format(
                (time_hns() - time_now) / 1000000))
            time_now = time_hns()

            logging.debug("Smooth matrix")
            # matrix to smoothed 1D array
            data_smoothed = self.softmax_smooth2(smooth_2d_array, steep)

            # change AU intensity with GUI multiplier
            if queue_no == 0:
                data_smoothed = data_smoothed * self.multiplier

            # values back into dict
            data_dict = dict(zip(data_dict.keys(), data_smoothed))

            logging.debug("\t\tTIME SMOOTH: array to dict: {}\n\n".format(
                (time_hns() - time_now) / 1000000))

            return data_dict
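# Usage sketch (SmoothData, set_new_multiplier and trailing_moving_average2 come
# from this module; the demo function name, AU keys and values are made up): feed
# successive frames through the same queue and the jump in AU values is damped
# towards the previous frames.
def _smoothing_demo():
    smoother = SmoothData()
    smoother.set_new_multiplier(2)  # one multiplier per AU column
    frames = [{'AU01_r': 0.0, 'AU02_r': 0.0},
              {'AU01_r': 1.0, 'AU02_r': 0.5},
              {'AU01_r': 1.0, 'AU02_r': 0.5}]
    for frame in frames:
        print(smoother.trailing_moving_average2(frame, queue_no=0,
                                                window_size=3, steep=.25))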