import argparse
import datetime
import pickle

import numpy as np
from pyquaternion import Quaternion

# Command-line configuration for the extrinsic-data capture script.
parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str,
                    default='/home/tidyboy/calibration/ext_data/')
parser.add_argument('--cam_type', type=str)
args = parser.parse_args()

# Ensure the output tree (images/ and poses/ below data_dir) exists up front.
utils.create_muldir(args.data_dir, args.data_dir + '/images',
                    args.data_dir + '/poses')

#data_dir = '/home/tidyboy/ARAICodes/catkin_ws/src/robot_cal_tools/rct_examples/data/target_6x7_0/'

# Instantiate only the requested camera(s); 'both' enables the static (sony)
# and the wrist-mounted (realsense) camera together.
if args.cam_type in ('sony', 'both'):
    static_cam = cam_tool('sony')
if args.cam_type in ('rs', 'both'):
    wrist_cam = cam_tool('rs')

robot = urx.Robot("192.168.1.109")

pic_id = 0
initialized = False

# First capture trigger fires five seconds after start-up.
starttime = datetime.datetime.now()
nexttime = starttime + datetime.timedelta(seconds=5)
while True:
    if args.cam_type in ('sony', 'both'):
        static_cam.capture('static_view.jpg')
    if args.cam_type in ('rs', 'both'):
        wrist_cam.capture('wrist_view.jpg')
# Esempio n. 2
# 0
                    help="number of cols of the target")
# Target geometry and camera-model options (the '--w' option is defined in a
# part of this file that was truncated by the scraper).
parser.add_argument('--h', type=int, default=6,
                    help="number of rows of the target")
parser.add_argument('--size', type=float, default=0.0228,
                    help='size of each lattice of target')
parser.add_argument('--static_cam_model', type=str, default='sony')
parser.add_argument('--wrist_cam_model', type=str, default='rs')
parser.add_argument('--proj_to_image', type=int, default=1)
args = parser.parse_args()

# Pick the camera by role; any other --cam value leaves `cam` unbound and the
# capture below raises NameError (deliberately preserved behavior).
if args.cam == 'static':
    cam = cam_tool(args.static_cam_model)
elif args.cam == 'wrist':
    cam = cam_tool(args.wrist_cam_model)

# Grab one frame and locate the calibration-target corners in it.
cam.capture('preview.jpg')
frame = cv2.imread('preview.jpg')
_, imgpoints = utils.find_imagepoints_from_images(
    [frame], args.w, args.h, show_imgs=1)
imgpoints = np.squeeze(imgpoints[0])

# Current wrist pose (4x4 homogeneous matrix) from the UR robot.
robot = urx.Robot('192.168.1.109')
W2B = np.array(robot.get_pose().array)

# NOTE(review): presumably the target center in robot base coordinates —
# confirm against the caller/later code (truncated here).
center = np.array([-0.45165, -0.1984, 0])
# Esempio n. 3
# 0
import pygame
import argparse
from cam_tool import cam_tool

# CLI: which camera backend to preview.
parser = argparse.ArgumentParser()
parser.add_argument('--cam_type', type=str,
                    help="Camera type. one of realsense and sony")
args = parser.parse_args()


def quit_pressed():
    """Return True if a pygame QUIT event is pending.

    pygame.event.get() drains the whole event queue, exactly as the
    original loop did; non-QUIT events are discarded either way.
    """
    return any(event.type == pygame.QUIT for event in pygame.event.get())


cam = cam_tool(args.cam_type)

# Live preview: capture and display frames until the window is closed.
while not quit_pressed():
    preview_path = 'preview.jpg'
    cam.capture(preview_path)
    cam.show(preview_path)
# Esempio n. 4
# 0
import argparse
import utils
from cam_tool import cam_tool

# CLI: camera model plus an optional depth-map toggle.
parser = argparse.ArgumentParser()
parser.add_argument('--cam_model', type=str,
                    help="Camera model. one of rs(realsense) and sony")
parser.add_argument('--depth', type=int, default=0, help="to use depth map")
args = parser.parse_args()

cam = cam_tool(args.cam_model)

# Preview loop: grab a frame (optionally with depth) and display it until
# the user closes the window.
while not utils.quit_pressed():
    preview_path = 'preview.jpg'
    cam.capture(preview_path, args.depth)
    utils.show(preview_path)
# Esempio n. 5
# 0
def wait():
    """Busy-wait for the next capture trigger.

    In auto mode (args.auto != 0) return once the module-level `nexttime`
    deadline has passed; otherwise return when the user presses SPACE.
    """
    while True:
        if args.auto == 0:
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                    return
        elif datetime.datetime.now() > nexttime:
            return


robot = urx.Robot("192.168.1.109")

# One-shot preview from the static (sony) camera.
cam = cam_tool('sony')
cam.capture('preview.jpg')
cam.show('preview.jpg')

# Sub-pixel corner refinement stops after 30 iterations or 0.001 epsilon.
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
img = cv2.imread('preview.jpg')
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# NOTE(review): patternSize here is (args.h, args.w); OpenCV expects
# (points per row, points per column) — confirm this matches the target.
found, corners = cv2.findChessboardCorners(img, (args.h, args.w), None)
if found:
    corners = cv2.cornerSubPix(img, corners, (11, 11), (-1, -1), criteria)
    img = cv2.drawChessboardCorners(img, (args.h, args.w), corners, found)
    cv2.imshow('img', img)
    cv2.waitKey(0)

with open('results/extrinsic/static/mtx.pkl', 'rb') as f:
# Esempio n. 6
# 0
def wait():
    """Busy-wait for the next capture trigger.

    Auto mode (args.auto != 0) waits for the module-level `nexttime`
    deadline; manual mode waits for a SPACE key press via pygame.
    """
    while True:
        auto_mode = args.auto != 0
        if auto_mode and datetime.datetime.now() > nexttime:
            return
        if not auto_mode:
            for ev in pygame.event.get():
                if ev.type == pygame.KEYDOWN and ev.key == pygame.K_SPACE:
                    return


robot = urx.Robot("192.168.1.109")

# One-shot preview from the wrist-mounted realsense camera.
cam = cam_tool('rs')
cam.capture('preview.jpg')
cam.show('preview.jpg')

# Locate the calibration-target corners; imgpoints becomes an array of
# pixel coordinates after squeezing the singleton batch dimension.
_, imgpoints = utils.find_imagepoints_from_images([cv2.imread('preview.jpg')],
                                                  args.w,
                                                  args.h,
                                                  show_imgs=1)
imgpoints = np.squeeze(imgpoints[0])

img = cv2.imread('preview.jpg')
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# Load the previously calibrated wrist-camera extrinsic matrix.
with open('results/extrinsic/wrist/mtx.pkl', 'rb') as f:
    C2W = pickle.load(f)[0]
    # Fixed: Python 2 `print` statement -> Python 3 print() call
    # (same output: the label, a space, then the matrix).
    print('C2W :\n', C2W)