def process(new_data): nonlocal prev new_point = add_points(scale_point(prev, percent), scale_point(new_data, 1.0 - percent)) prev = new_point return new_point
def process(new_data): nonlocal prev_prediction nonlocal est_velocity current_velocity = add_points(new_data, invert_point(prev_prediction)) est_velocity = add_points( scale_point(est_velocity, velocity_smoothing_factor), scale_point(current_velocity, 1.0 - velocity_smoothing_factor)) predicted_point = add_points(prev_prediction, est_velocity) smoothed_predicted_point = add_points( scale_point(predicted_point, prediction_factor), scale_point(prev_prediction, 1.0 - prediction_factor)) prev_prediction = smoothed_predicted_point return smoothed_predicted_point
def process(new_data): nonlocal history history.append(new_data) if len(history) > len(weights): history = history[-len(weights):] p_sum = Point(0, 0) for point, weight in zip(reversed(history), weights): p_sum = add_points(p_sum, scale_point(point, weight)) return p_sum
def generate_line_segment(start_point, end_point, steps):
    """Return *steps* points from start_point toward end_point.

    The first point is start_point itself. end_point is NOT included,
    so consecutive line segments can be joined without duplicating the
    shared endpoint. ``steps == 0`` yields an empty list.
    """
    span = add_points(invert_point(start_point), end_point)
    return [
        add_points(start_point, scale_point(span, step / steps))
        for step in range(steps)
    ]
def process(new_data): nonlocal history history.append(new_data) if len(history) > len(weights): history = history[-len(weights):] p_sum = Point(0, 0) # Ensure the weights are normalized for the available history normalized_weights = weights[:len(history)] normalized_weights = [ w / sum(normalized_weights) for w in normalized_weights ] for point, weight in zip(reversed(history), normalized_weights): p_sum = add_points(p_sum, scale_point(point, weight)) return p_sum