# Example #1 (listing separator; original scrape marker read "Exemple #1" / "0")
    # Vertical unit vector of the world frame.
    e_z = array([0., 0., 1.])
    # Horizontal unit vector from the CoM toward the target: remove the
    # vertical component of (target - com), then normalize.
    e_x = pendulum.target - pendulum.com.p
    e_x = normalize(e_x - dot(e_x, e_z) * e_z)
    # Height of the CoM above the contact, offset by the desired critical
    # height z_crit_des (defined outside this excerpt).
    z_diff = (pendulum.com.z - contact.z) - z_crit_des
    # Signed horizontal distance from the contact point to the CoM along e_x.
    x = -dot(e_x, (contact.p - pendulum.com.p))
    # Horizontal velocity command; 9.81 is gravitational acceleration [m/s^2].
    # NOTE(review): assumes z_diff != 0 and the sqrt argument is nonnegative —
    # confirm upstream guards.
    xd = -x / (2 * z_diff) * (-zd + sqrt(zd**2 + 2 * 9.81 * z_diff))
    vel = xd * e_x + zd * e_z
    if '--gui' in sys.argv:
        # Keep a module-level reference so the drawn line persists in the GUI.
        global vel_handle
        vel_handle = draw_line(pendulum.com.p, pendulum.com.p + 0.5 * vel)
    pendulum.com.set_vel(vel)
    sim.step()


if __name__ == "__main__":
    # Simulation at a 30 ms timestep.
    sim = pymanoid.Simulation(dt=3e-2)
    try:  # use HRP4 if available
        robot = pymanoid.robots.HRP4()
    except:  # otherwise use default model
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # catching the specific model-not-found exception would be safer.
        robot = pymanoid.robots.JVRC1()
    robot.set_transparency(0.5)
    robot.suntan()
    # Low-weight shoulder posture tasks (regularization; weight 1e-4).
    robot.add_shoulder_abduction_task(weight=1e-4)
    robot.add_shoulder_flexion_task(weight=1e-4)
    if '--gui' in sys.argv:
        sim.set_viewer()
        sim.viewer.SetBkgndColor([1, 1, 1])  # white background
        # 4x4 homogeneous camera transform — NOTE(review): the matrix is
        # truncated in this excerpt (closing rows/brackets missing).
        sim.viewer.SetCamera(
            [[-0.43496840, 0.32393275, -0.84016074, 2.07345104],
             [+0.88804317, 0.30865173, -0.34075422, 0.77617097],
             [+0.14893562, -0.89431632, -0.42192002, 1.63681912],
# Example #2 (listing separator; original scrape marker read "Exemple #2" / "0")
        # NOTE(review): Python 2 print statement — not valid Python 3; the
        # parenthesized form print(...) would be 2/3-compatible.
        print "\nNo scenario specified. Using elliptic staircase."
        staircase = "elliptic-staircase"
    # Load the contact sequence for the chosen scenario; cyclic=True makes
    # the feed wrap around after the last contact.
    contact_feed = ContactFeed(
        path='scenarios/%s/contacts.json' % staircase,
        cyclic=True)
    # Alternate support feet: even-indexed contacts go to the right foot.
    for (i, contact) in enumerate(contact_feed.contacts):
        contact.link = robot.right_foot if i % 2 == 0 else robot.left_foot
    return contact_feed


if __name__ == "__main__":
    if "-h" in sys.argv or "--help" in sys.argv:
        print_usage()
        sys.exit()
    random.seed(34)  # fixed seed for reproducible runs
    sim = pymanoid.Simulation(dt=0.03)
    try:
        robot = pymanoid.robots.HRP4()
    except:  # model not found
        # NOTE(review): bare except is overly broad; prefer catching the
        # specific model-loading exception.
        robot = pymanoid.robots.JVRC1()
    robot.set_transparency(0.3)
    sim.set_viewer()
    set_camera_1()
    contact_feed = load_scenario()
    init_posture(contact_feed)
    # Inverted-pendulum model with omega^2 = g / leg_length; ZMP delay and
    # noise parameters come from module-level constants outside this excerpt.
    pendulum = FIP(
        robot.mass, omega2=9.81 / robot.leg_length, com=robot.com,
        zmp_delay=ZMP_DELAY, zmp_noise=ZMP_NOISE)
    state_estimator = StateEstimator(
        pendulum, ESTIMATION_DELAY, COM_NOISE, COMD_NOISE)
    # NOTE(review): this call is truncated in the excerpt (arguments missing).
    wpg = WalkingPatternGenerator(
        """
        Run CoM predictive control for single-support state.
        """
        if self.preview_time >= self.mpc_timestep:
            if self.state == "DoubleSupport":
                self.update_mpc(self.rem_time, self.ssp_duration)
            else:  # self.state == "SingleSupport":
                self.update_mpc(0., self.rem_time)
        com_jerk = array([self.x_mpc.U[0][0], self.y_mpc.U[0][0], 0.])
        stance.com.integrate_constant_jerk(com_jerk, dt)
        self.preview_time += dt


if __name__ == "__main__":
    dt = 0.03  # simulation timestep [s]
    sim = pymanoid.Simulation(dt=dt)
    # Fetch the JVRC-1 model if it is not already on disk.
    robot = JVRC1(download_if_needed=True)
    sim.set_viewer()
    # 4x4 homogeneous camera transform.
    sim.set_camera_transform(
        [[-0.86825231, 0.13816899, -0.47649476, 2.95342016],
         [0.49606811, 0.22750768, -0.8379479, 3.26736617],
         [-0.0073722, -0.96392406, -0.2660753, 1.83063173], [0., 0., 0., 1.]])
    robot.set_transparency(0.3)

    # Footstep plan parameters: 2.1 m total distance, 0.3 m step length,
    # 0.1 m lateral spread, friction coefficient 0.7 — presumably a straight
    # walk; confirm in generate_footsteps (defined outside this excerpt).
    footsteps = generate_footsteps(distance=2.1,
                                   step_length=0.3,
                                   foot_spread=0.1,
                                   friction=0.7)
    # Point-mass CoM target placed at leg height above the origin.
    com_target = PointMass([0, 0, robot.leg_length], robot.mass)
    # NOTE(review): this call is truncated in the excerpt (arguments missing).
    stance = Stance(com=com_target,
                    left_foot=footsteps[0].copy(hide=True),