Example #1
# Assumes `trackers` (a list of ObjectTracker instances) and `video_path`
# were defined earlier in the script
from yupi.tracking import TrackingScenario
from yupi.analyzing import plot_trajectories

# Create a Tracking Scenario
scenario = TrackingScenario(trackers)

# Track the video using the preconfigured scenario
retval, tl = scenario.track(video_path, pix_per_m=4441, start_frame=160, end_frame=210)

# Computing the trajectory of the LED relative to the center pivot
center, led = tl
led_centered = led - center
led_centered.id = 'led'

# Computing the trajectory of the wheel relative to the center pivot
wheel_centered = led_centered.copy()
wheel_centered.add_polar_offset(0.039, 0)
wheel_centered.id = 'wheel'
plot_trajectories([wheel_centered, led_centered])

# Computing the trajectory of the wheel relative to its initial position
wheel = wheel_centered - wheel_centered.r[0]

# Computing the linear velocity in optimal conditions (omega x r)
v_opt = 4 * 0.07

# Computing the linear velocity by the results of the tracking
v_meas = wheel.v.norm

# Computing the efficiency
eff = v_meas/v_opt

# Plotting the linear displacement of the wheel
import matplotlib.pyplot as plt
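The example is cut off right after the matplotlib import; a minimal sketch of the announced displacement plot, assuming the trajectory exposes `t` and `r.norm` in the same way `v.norm` is used above, could look like this:

# Hypothetical completion, not part of the original example:
# plot the magnitude of the wheel displacement against time.
plt.plot(wheel.t, wheel.r.norm)
plt.xlabel('time [s]')
plt.ylabel('linear displacement [m]')
plt.show()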
Example #2
            # positions of the i-th walker (all time steps, all dimensions)
            points = self.r[:, :, i]
            trajs.append(
                Trajectory(points=points,
                           dt=self.dt,
                           t=self.t,
                           traj_id=f"LangevinSolution {i + 1}"))
        return trajs


if __name__ == '__main__':

    from yupi.analyzing import plot_trajectories

    # set parameter values
    T = 500  # total time (number of time steps if dt==1)
    dim = 2  # dimension of the walker trajectories
    N = 3  # number of random walkers
    dt = 1  # time step

    # probability of each action to be taken
    # along each axis
    prob = [
        [.5, .1, .4],  # x-axis
        [.5, 0, .5],   # y-axis
    ]

    # create a RandomWalkGenerator and generate the trajectories
    rw = RandomWalkGenerator(T, dim, N, dt, prob)
    tr = rw.generate()
    plot_trajectories(tr)
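For reference, the `prob` matrix above gives, for each axis, the probability of the three possible actions at every time step (assumed here to be a step of -1, 0 or +1). A standalone illustration of that sampling, not yupi's internal code:

import numpy as np

steps = [-1, 0, 1]                                 # decrease, stay, increase (assumed encoding)
x_step = np.random.choice(steps, p=[.5, .1, .4])   # x-axis probabilities
y_step = np.random.choice(steps, p=[.5, 0, .5])    # y-axis probabilities
print(x_step, y_step)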
Example #3
from yupi.tracking import ROI, ObjectTracker, TrackingScenario
from yupi.tracking import ColorMatching
from yupi.analyzing import plot_trajectories

# Initialize main tracking objects
algorithm = ColorMatching((180, 125, 35), (190, 135, 45))  # lower and upper color bounds of the target
blue_ball = ObjectTracker('blue', algorithm, ROI((100, 100)))
scenario = TrackingScenario([blue_ball])

# Track the video using the preconfigured scenario
retval, tl = scenario.track('resources/videos/demo.avi', pix_per_m=10)
plot_trajectories(tl)
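Example #1 passes a whole list of trackers to TrackingScenario; a sketch of how this example could track a second object in the same video (the color bounds below are placeholders, not calibrated values):

# Sketch only: a second tracker added to the same scenario.
red_algorithm = ColorMatching((40, 40, 160), (60, 60, 200))     # placeholder bounds
red_ball = ObjectTracker('red', red_algorithm, ROI((100, 100)))
scenario = TrackingScenario([blue_ball, red_ball])
retval, tl = scenario.track('resources/videos/demo.avi', pix_per_m=10)
plot_trajectories(tl)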
Example #4
noise_scale_adim = np.sqrt(2 * dt_adim)  # scale parameter of noise pdf
v0_adim = np.random.randn(dim, N)  # initial dimensionless speeds

## 2. Simulating the process

lg = LangevinGenerator(tt_adim, dim, N, dt_adim, v0=v0_adim)
lg.set_scale(v_scale=vr, r_scale=lr, t_scale=tr)
trajs = lg.generate()

## 3. Data analysis and plots

plt.figure(figsize=(9, 5))

# Spatial trajectories
ax1 = plt.subplot(231)
ypa.plot_trajectories(trajs[:5], legend=False, show=False)

# Velocity histogram
v = ypa.estimate_velocity_samples(trajs, step=1)
ax2 = plt.subplot(232)
ypa.plot_velocity_hist(v, bins=20, show=False)

# Turning angles
theta = ypa.estimate_turning_angles(trajs)
ax3 = plt.subplot(233, projection='polar')
ypa.plot_angle_distribution(theta, show=False)

# Mean square displacement
lag_msd = 30
msd, msd_std = ypa.estimate_msd(trajs, time_avg=True, lag=lag_msd)
ax4 = plt.subplot(234)
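The snippet is truncated after creating the fourth subplot; a plain-matplotlib sketch of how the MSD panel could be filled in (yupi may provide its own plotting helper for this, not shown here):

# Hypothetical completion of the MSD panel, assuming `msd` and
# `msd_std` are 1-D arrays of per-lag values.
lag = np.arange(len(msd))
plt.errorbar(lag, msd, yerr=msd_std, fmt='.-')
plt.xlabel('lag (steps)')
plt.ylabel('MSD')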