Example #1
def compose(task, *args, models: bool = False, **kwargs) -> dict:
    # Compose the Hydra config, then un-freeze it so task/data keys can be injected.
    cfg = hydra_compose(*args, **kwargs)
    OmegaConf.set_struct(cfg, False)
    task_name = get_task(task, name=True, models=models)
    cfg["task"] = task_name
    cfg["data"] = load_dataset_info(get_task(task_name))

    # Import any side-effect modules listed under `additional_imports`.
    for imp in cfg.get("additional_imports", []):
        print(f"Additional import found: {imp}")
        importlib.import_module(imp)

    return cfg
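A minimal usage sketch, assuming a Hydra config tree with a `task` group; `get_task` and `load_dataset_info` are the project's own helpers, and the task name and override below are placeholders:

# Hypothetical call; everything after `task` is forwarded to hydra_compose.
cfg = compose("classification", config_name="config", overrides=["seed=0"])
print(cfg["task"], cfg["data"])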
Example #2
def test_miniimagenet_integration(adaptation_method, experiment_setting):
    def generate_dummy_miniimagenet_data(dir_path):
        """Generates dummy data that imitates mini-ImageNet structure.

        Mini-ImageNet is too heavy for integration testing, so we generate
        synthetic data (dummy images) that satisfy the mini-ImageNet spec.
        """
        num_dummy_categories = 20
        num_dummy_img_per_category = 10
        img_height, img_width = 84, 84
        data_path = os.path.join(dir_path, "miniimagenet")
        for set_name in ["train", "valid", "test"]:
            set_path = os.path.join(data_path, set_name)
            for cid in range(num_dummy_categories):
                dummy_category_name = f"n{cid:05d}"
                dummy_category_path = os.path.join(set_path, dummy_category_name)
                os.makedirs(dummy_category_path)
                for img_id in range(num_dummy_img_per_category):
                    # uint8 so PIL can interpret the array (int8 is not a valid image dtype)
                    img_array = np.full(
                        (img_height, img_width), img_id * 20, dtype=np.uint8
                    )
                    img_path = os.path.join(dummy_category_path, f"{img_id}.jpg")
                    Image.fromarray(img_array).convert("RGB").save(img_path)

    with tempfile.TemporaryDirectory() as dir_path:
        logger.info(f"Generating dummy mini-ImageNet...")
        generate_dummy_miniimagenet_data(dir_path)
        data_path = os.path.join(dir_path, "miniimagent")
        cfg = hydra_compose(
            config_file="config.yaml",
            overrides=[
                f"adaptation={adaptation_method}",
                f"test=miniimagenet/{experiment_setting}",
                f"data=miniimagenet",
                f"network=miniimagenet",
                f"data.source.data_dir={data_path}",
            ],
            strict=False,
        )
        if cfg.train is not None:
            train(cfg, work_dir=dir_path)
        if cfg.eval is not None:
            evaluate(cfg, work_dir=dir_path)
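The two arguments presumably come from pytest parametrization in the real suite; a hedged sketch with placeholder values:

import pytest

# Hypothetical parametrization of the test above; "maml" and "5way_1shot"
# are placeholder values, not values taken from the suite.
@pytest.mark.parametrize("adaptation_method", ["maml"])
@pytest.mark.parametrize("experiment_setting", ["5way_1shot"])
def test_miniimagenet_integration(adaptation_method, experiment_setting):
    ...  # body as defined above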
Example #3
def setup_dict():
    """
    if model is "ground_truth":
        tensorType = 'torch.DoubleTensor'
        torch.set_default_tensor_type(tensorType)
    else:
        tensorType = 'torch.FloatTensor'
        torch.set_default_tensor_type(tensorType)
    """
    # Set all seeds to ensure reproducibility
    random.seed(0)
    np.random.seed(1)
    torch.manual_seed(0)

    # Load configuration
    abs_config_dir = os.path.abspath("conf")
    with initialize_config_dir(config_dir=abs_config_dir):
        # Compose from torch_robot_model_gt.yaml; this pulls in its configured defaults.
        cfg = hydra_compose(config_name="torch_robot_model_gt.yaml")
    robot_model = DifferentiableRobotModel(**cfg.model)
    test_case = sample_test_case(robot_model)

    return {"robot_model": robot_model, "test_case": test_case}
Example #4
def test_omniglot_integration(adaptation_method, experiment_setting):
    def fetch_omniglot(dir_path):
        omniglot_dir = os.path.join(dir_path, "omniglot")
        os.makedirs(omniglot_dir, exist_ok=False)
        for suffix in ["small1", "small2"]:
            name = f"images_background_{suffix}.zip"
            url = OMNIGLOT_URL + name
            tmp_path = os.path.join(dir_path, "tmp.zip")
            urllib.request.urlretrieve(url, tmp_path)
            with zipfile.ZipFile(tmp_path, "r") as zip_ref:
                zip_ref.extractall(dir_path)
            extracted_dir = os.path.join(dir_path, name.split(".")[0])
            for category_dir in glob.glob(os.path.join(extracted_dir, "*")):
                category_name = os.path.basename(category_dir)
                shutil.move(category_dir,
                            os.path.join(omniglot_dir, category_name))

    with tempfile.TemporaryDirectory() as dir_path:
        logger.info(f"Fetching omniglot...")
        fetch_omniglot(dir_path)
        data_path = os.path.join(dir_path, "omniglot")
        cfg = hydra_compose(
            config_file="config.yaml",
            overrides=[
                f"adaptation={adaptation_method}",
                f"test=omniglot/{experiment_setting}",
                f"data=omniglot",
                f"network=omniglot",
                f"data.source.data_dir={data_path}",
            ],
            strict=False,
        )
        if cfg.train is not None:
            train(cfg, work_dir=dir_path)
        if cfg.eval is not None:
            evaluate(cfg, work_dir=dir_path)
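Run directly, the call would look like this (hedged: the argument values are placeholders, and OMNIGLOT_URL must already be defined by the enclosing module):

# Hypothetical invocation with placeholder values.
test_omniglot_integration("maml", "5way_1shot")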
Example #5

class NMSELoss(torch.nn.Module):
    """Mean squared error normalized by the targets' variance."""

    def __init__(self, var):
        super().__init__()
        self.var = var

    def forward(self, yp, yt):
        err = (yp - yt) ** 2
        werr = err / self.var
        return werr.mean()


abs_config_dir = os.path.abspath(
    os.path.join(differentiable_robot_model.__path__[0], "../conf"))
with initialize_config_dir(config_dir=abs_config_dir):
    learnable_robot_model_cfg = hydra_compose(
        config_name="torch_robot_model_learnable_l4dc_constraints.yaml")


# ground truth robot model (with known kinematics and dynamics parameters) - used to generate data
gt_robot_model = DifferentiableKUKAiiwa()
gt_robot_model.print_link_names()

# Generate training data from the ground-truth model (the loader below needs it).
train_data = generate_sine_motion_inverse_dynamics_data(
    gt_robot_model, n_data=1000, dt=1.0 / 250.0, freq=0.05)
train_loader = DataLoader(dataset=train_data, batch_size=100, shuffle=False)

# learnable robot model
urdf_path = os.path.join(diff_robot_data.__path__[0],
                         learnable_robot_model_cfg.model.rel_urdf_path)
learnable_robot_model = DifferentiableRobotModel(urdf_path,
                                                 learnable_robot_model_cfg.model.learnable_rigid_body_config,
                                                 learnable_robot_model_cfg.model.name)
optimizer = torch.optim.Adam(learnable_robot_model.parameters(), lr=1e-2)
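The excerpt stops after building the optimizer; a hedged sketch of the elided training loop. The batch structure (q, qd, qdd, tau) and the unit-variance normalizer are assumptions, and compute_inverse_dynamics is the library call this data is normally paired with:

# Hedged sketch; batch unpacking and hyperparameters are assumptions.
loss_fn = NMSELoss(var=1.0)  # unit variance as a placeholder normalizer
for epoch in range(50):
    for q, qd, qdd, tau in train_loader:
        optimizer.zero_grad()
        tau_pred = learnable_robot_model.compute_inverse_dynamics(q, qd, qdd)
        loss = loss_fn(tau_pred, tau)
        loss.backward()
        optimizer.step()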
Example #6
class NMSELoss(torch.nn.Module):
    """Mean squared error normalized by the targets' variance."""

    def __init__(self, var):
        super().__init__()
        self.var = var

    def forward(self, yp, yt):
        err = (yp - yt) ** 2
        werr = err / self.var
        return werr.mean()


abs_config_dir = os.path.abspath(
    os.path.join(differentiable_robot_model.__path__[0], "../conf"))
with initialize_config_dir(config_dir=abs_config_dir):
    learnable_robot_model_cfg = hydra_compose(
        config_name="torch_robot_model_learnable_dynamics_solo.yaml")

# training data: a pre-recorded solo12 dataset loaded from CSV
filename = '/Users/paarth/Software/motion_planning/kino_dyn/data/solo12_data.csv'
with open(filename, 'rt') as data_file:
    reader = csv.reader(data_file, delimiter=',')
    x = list(reader)
data = numpy.array(x).astype('float')
train_data = data

#train_data = generate_sine_motion_inverse_dynamics_data(gt_robot_model, n_data=1000, dt=1.0/250.0, freq=0.05)
train_loader = DataLoader(dataset=train_data, batch_size=100, shuffle=False)

# learnable robot model
urdf_path = os.path.join(diff_robot_data.__path__[0],
                         learnable_robot_model_cfg.model.rel_urdf_path)
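A DataLoader over a raw NumPy array yields float64 NumPy rows; a hedged alternative that keeps batches as float32 torch tensors (no interpretation of the CSV columns is attempted here):

import torch
from torch.utils.data import DataLoader, TensorDataset

# Convert once to float32; TensorDataset then handles indexing and batching.
train_tensor = torch.as_tensor(train_data, dtype=torch.float32)
train_loader = DataLoader(TensorDataset(train_tensor), batch_size=100, shuffle=False)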
Example #7
import os
import random

import numpy as np
import torch
from hydra import compose as hydra_compose
from hydra import initialize_config_dir

from differentiable_robot_model.data_generation_utils import (
    generate_random_forward_kinematics_data, )
from differentiable_robot_model.robot_model import (
    DifferentiableKUKAiiwa, DifferentiableRobotModel)
import differentiable_robot_model
import diff_robot_data

torch.set_printoptions(precision=3, sci_mode=False)
random.seed(0)
np.random.seed(1)
torch.manual_seed(0)

abs_config_dir = os.path.abspath(
    os.path.join(differentiable_robot_model.__path__[0], "../conf"))
# we load a learnable robot model
with initialize_config_dir(config_dir=abs_config_dir):
    # which parameters are learnable is specified in the config file
    learnable_robot_model_cfg = hydra_compose(
        config_name="torch_robot_model_learnable_kinematics.yaml")

gt_robot_model = DifferentiableKUKAiiwa()
urdf_path = os.path.join(diff_robot_data.__path__[0],
                         learnable_robot_model_cfg.model.rel_urdf_path)
learnable_robot_model = DifferentiableRobotModel(
    urdf_path,
    learnable_robot_model_cfg.model.learnable_rigid_body_config,
    learnable_robot_model_cfg.model.name,
)

train_data = generate_random_forward_kinematics_data(gt_robot_model,
                                                     n_data=100,
                                                     ee_name="iiwa_link_ee")
q = train_data["q"]
gt_ee_pos = train_data["ee_pos"]
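The excerpt ends with the ground-truth data; a hedged sketch of the elided fitting step. The optimizer settings, iteration count, and plain MSE loss are assumptions; compute_forward_kinematics is the model call this data is meant to supervise:

# Hedged sketch; hyperparameters and the loss are assumptions.
optimizer = torch.optim.Adam(learnable_robot_model.parameters(), lr=1e-2)
loss_fn = torch.nn.MSELoss()
for step in range(300):
    optimizer.zero_grad()
    ee_pos, _ = learnable_robot_model.compute_forward_kinematics(
        q, link_name="iiwa_link_ee")
    loss = loss_fn(ee_pos, gt_ee_pos)
    loss.backward()
    optimizer.step()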