Example 1
import argparse

def main():
    # Parse the dataset name, the object key, and an optional grasp directory.
    parser = argparse.ArgumentParser()
    parser.add_argument('dataset')
    parser.add_argument('graspable')
    parser.add_argument('--grasp-dir')
    args = parser.parse_args()

    # database, CONFIG, and visualize are defined in the surrounding module.
    dataset = database.Dataset(args.dataset, CONFIG)
    graspable = dataset[args.graspable]
    grasps = dataset.load_grasps(args.graspable, args.grasp_dir)

    visualize(grasps)
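
Note that argparse converts dashes to underscores when it derives attribute names, so the --grasp-dir flag above is read back as args.grasp_dir. A minimal, self-contained sketch of that behavior:

import argparse

# argparse maps the flag --grasp-dir to the attribute grasp_dir.
parser = argparse.ArgumentParser()
parser.add_argument('--grasp-dir')
args = parser.parse_args(['--grasp-dir', '/tmp/grasps'])
assert args.grasp_dir == '/tmp/grasps'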
Example 2
import argparse

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('dataset')
    parser.add_argument('graspable')
    parser.add_argument('--grasp_dir', default=None)
    args = parser.parse_args()

    # database, CONFIG, THETA_RES, and visualize come from the surrounding module.
    dataset = database.Dataset(args.dataset, CONFIG)
    graspable = dataset[args.graspable]
    grasps = dataset.load_grasps(args.graspable, args.grasp_dir)

    # Rotate every grasp into the object's frame; transform returns an
    # iterable of grasps, so extend flattens them into a single list.
    rotated_grasps = []
    for g in grasps:
        rotated_grasps.extend(g.transform(graspable.tf, THETA_RES))
    visualize(graspable, rotated_grasps)
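
The loop above flattens the per-grasp transform results into one list. Assuming each g.transform(...) returns an iterable of transformed grasps, the same flattening can be written as a single comprehension:

# Equivalent flattening of the per-grasp transform results.
rotated_grasps = [rg for g in grasps
                  for rg in g.transform(graspable.tf, THETA_RES)]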
Example 3
from datetime import datetime

from flask import g, render_template

def new_dataset():
    # merge_http_request_arguments, database, and rass_app are defined
    # in the surrounding application.
    args = merge_http_request_arguments(True)

    # Resolve the current user and the requested dataset type.
    user = database.User.query.filter_by(id=g.user_id).one()
    dataset_type = database.DatasetType.query.filter_by(id=int(args['dataset_type'])).one()

    # Build and persist the new dataset record.
    dataset = database.Dataset(name=args['name'], user_created=user)
    dataset.short_notes = args['short_notes']
    dataset.long_notes = rass_app.LONG_NOTES
    dataset.user_modified = user
    dataset.date_created = datetime.strptime(args['date_created'], '%d.%m.%Y')
    dataset.type = dataset_type
    database.db.session.add(dataset)
    database.db.session.commit()

    return render_template('datastore/dataset.html', uid=dataset.id, dataset=dataset)
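
One caveat in this handler: datetime.strptime raises ValueError on malformed input, which would surface as a 500 error. A sketch of defensive parsing, assuming a Flask app (the parse_created_date helper and its error message are illustrative, not part of the original):

from datetime import datetime

from flask import abort

def parse_created_date(raw):
    # Parse a DD.MM.YYYY string, rejecting the request on bad input.
    try:
        return datetime.strptime(raw, '%d.%m.%Y')
    except ValueError:
        abort(400, description='date_created must use the DD.MM.YYYY format')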
Example 4
import logging
from os.path import exists

import h5py
import numpy as np

def load_data(path, config):
    # An existing HDF5 file at `path` serves as a cache of the design matrix.
    precomputed = exists(path)

    # db and ff are modules from the surrounding project.
    training = db.Dataset(config['dataset'], config)
    all_grasps = []
    all_features = []

    # Collect every object's grasps; features are only needed on a cache miss.
    for obj in training:
        obj_grasps = training.load_grasps(obj.key)
        all_grasps.extend(obj_grasps)

        if not precomputed:
            feature_loader = ff.GraspableFeatureLoader(obj, training.name, config)
            obj_features = feature_loader.load_all_features(obj_grasps)
            all_features.extend(obj_features)

    # Cache hit: read the design matrix straight from disk.
    if precomputed:
        logging.info('Loading from %s', path)
        with h5py.File(path, 'r') as f:
            design_matrix = f['projection_window'][()]
        logging.info('Loaded.')
        return all_grasps, design_matrix

    # Cache miss: build the design matrix from the projection-window features.
    num_grasps = len(all_grasps)
    design_matrix = np.zeros((num_grasps, 2 * config['window_steps']**2))

    for i, (_, feature) in enumerate(zip(all_grasps, all_features)):
        w1 = feature.extractors_[0]
        w2 = feature.extractors_[1]

        proj1 = w1.extractors_[0]
        proj2 = w2.extractors_[0]

        # Each row is the concatenation of the two projection windows.
        design_matrix[i, :] = np.concatenate([proj1.phi, proj2.phi])

    logging.info('Saving to %s', path)
    with h5py.File(path, 'w') as f:
        f['projection_window'] = design_matrix
    logging.info('Saved.')
    return all_grasps, design_matrix
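
Example 4 is a compute-or-load cache keyed on whether the HDF5 file already exists. A stripped-down sketch of the same pattern (the cached_matrix helper and its key argument are illustrative, not from the original code):

import os.path

import h5py

def cached_matrix(path, compute, key='projection_window'):
    # On a hit, read the whole dataset into memory; on a miss,
    # compute it and write it back under the same key.
    if os.path.exists(path):
        with h5py.File(path, 'r') as f:
            return f[key][()]
    matrix = compute()
    with h5py.File(path, 'w') as f:
        f[key] = matrix  # h5py creates the dataset on assignment
    return matrix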
Example 5
    logging.getLogger().setLevel(logging.INFO)

    # read config file
    config = ec.ExperimentConfig(args.config)
    chunk = db.Chunk(config)

    # make output directory, ignoring the error if it already exists
    dest = os.path.join(args.output_dest, chunk.name)
    try:
        os.makedirs(dest)
    except OSError:
        pass

    # optionally load a separate dataset to draw priors from
    if 'priors_dataset' in config:
        priors_dataset = db.Dataset(config['priors_dataset'], config)
    else:
        priors_dataset = None

    # loop through objects, labelling each (skip_keys is defined above this excerpt)
    results = []
    for obj in chunk:
        if obj.key in skip_keys:
            continue

        logging.info('Labelling object %s', obj.key)
        experiment_result = label_correlated(
            obj,
            chunk,
            config,
            priors_dataset=priors_dataset,