# Extrinsic-calibration data-capture script: opens the selected camera(s)
# and a UR robot connection, then captures image/pose pairs on a timer.
import argparse
import datetime  # FIX: datetime was used below but never imported
import pickle

import numpy as np
import urx
from pyquaternion import Quaternion

import utils
from cam_tool import cam_tool

parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str, default='/home/tidyboy/calibration/ext_data/')
parser.add_argument('--cam_type', type=str)
args = parser.parse_args()

# Make sure the output directory tree exists before capturing.
utils.create_muldir(args.data_dir, args.data_dir + '/images', args.data_dir + '/poses')

#data_dir = '/home/tidyboy/ARAICodes/catkin_ws/src/robot_cal_tools/rct_examples/data/target_6x7_0/'

# 'both' opens the static ('sony') and the wrist ('rs') camera together.
if args.cam_type == 'sony' or args.cam_type == 'both':
    static_cam = cam_tool('sony')
if args.cam_type == 'rs' or args.cam_type == 'both':
    wrist_cam = cam_tool('rs')

robot = urx.Robot("192.168.1.109")  # NOTE(review): hard-coded robot IP — confirm

pic_id = 0
initialized = False
# First capture is scheduled 5 seconds after startup.
starttime = datetime.datetime.now()
nexttime = starttime + datetime.timedelta(seconds=5)
# Intrinsic camera calibration: detect checkerboard corners in the captured
# images, run cv2.calibrateCamera, persist the results, then clean up.
images = utils.find_images_in_dir(filename)
imgs, imgpoints = utils.find_imagepoints_from_images(images, args.w, args.h)  # 2d points in image plane.
objpoints = utils.generate_objpoints(len(imgpoints), args.w, args.h, args.size)  # 3d point in real world space
# NOTE(review): presumably imgpoints is a mapping keyed by image index,
# so iterating yields keys — verify against utils.find_imagepoints_from_images.
imgpoints = np.array([imgpoints[i] for i in imgpoints])
cv2.destroyAllWindows()

print(imgs[0].shape[1], imgs[0].shape[0])
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(
    objpoints, imgpoints, (imgs[0].shape[1], imgs[0].shape[0]), None, None)
print(ret)
print(mtx)
print(dist)
print(rvecs)
print(tvecs)

result_dir = 'results/intrinsic/%s' % args.cam
utils.create_muldir('results', 'results/intrinsic', result_dir)
# FIX: use context managers so each pickle file is flushed and closed even
# if a dump raises — the original `pickle.dump(..., open(...))` calls
# leaked open file handles.
for name, value in (('ret', ret), ('mtx', mtx), ('dist', dist),
                    ('rvecs', rvecs), ('tvecs', tvecs)):
    with open('%s/%s.pkl' % (result_dir, name), 'wb') as f:
        pickle.dump(np.array(value), f)

cam.exit()
# Remove the temporary capture frames once the calibration results are saved.
for img_fname in glob.glob('%s_*.jpg' % filename):
    os.remove(img_fname)
# Hand-eye (extrinsic) data-pair capture: prepares output directories and
# opens the static and/or wrist camera plus the UR robot connection.
# FIX: argparse, utils and urx are used below but were not imported in
# this chunk; importing here is harmless even if they are imported above.
import argparse

import urx

import utils
from cam_tool import cam_tool
from pyquaternion import Quaternion

parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str, default='ext_data/')
parser.add_argument('--cam', type=str, help="cameras to make data pairs with. one of ['static', 'wrist', 'both']")
parser.add_argument('--static_cam_model', type=str, default='sony')
parser.add_argument('--wrist_cam_model', type=str, default='rs')
parser.add_argument('--auto', type=int, default=0)
parser.add_argument('--num_pic', type=int, default=50, help="number of pictures to take")
parser.add_argument('--period', type=float, default=0.5, help="period between which pictures would be taken")
args = parser.parse_args()

# Per-camera image/pose sub-directories are created up front.
utils.create_muldir(args.data_dir, args.data_dir + '/images', args.data_dir + '/poses',
                    args.data_dir + '/images/' + args.cam, args.data_dir + '/poses/' + args.cam)

#data_dir = '/home/tidyboy/ARAICodes/catkin_ws/src/robot_cal_tools/rct_examples/data/target_6x7_0/'

# 'both' enables the static and the wrist camera at the same time.
static_on = (args.cam == 'static' or args.cam == 'both')
wrist_on = (args.cam == 'wrist' or args.cam == 'both')
if static_on:
    static_cam = cam_tool(args.static_cam_model)
if wrist_on:
    wrist_cam = cam_tool(args.wrist_cam_model)

robot = urx.Robot("192.168.1.109")  # NOTE(review): hard-coded robot IP — confirm

pic_id = 0
initialized = False
from Bauhausbreak import BHB_Environment
from Racetrack import RT_Environment
from MCTS_agent import MCTS_agent
import utils
import argparse

# Command-line options for the MCTS experiment runner.
parser = argparse.ArgumentParser()
parser.add_argument("--task", default='BHB', type=str)
parser.add_argument("--size", default=8, help="environment size", type=int)
args = parser.parse_args()

if __name__ == '__main__':
    # Derive a run id from the hyper-parameters and build the log directory.
    fmt = utils.NameFormat('size')
    run_id = fmt.get_id_from_args(args)
    result_dir = '/home/yskim5892/DQL_results/%s/' % args.task
    log_dir = result_dir + '/log/%s/' % run_id
    utils.create_muldir(log_dir)

    if args.task == 'BHB':
        args.print_ep_period = 50
        env = BHB_Environment(args.size)
        # State shape is (size, size, 16); action count equals the board size.
        agent = MCTS_agent([args.size, args.size, 16], args.size, args, log_dir)
        agent.run(env)