def write_tf():
    """Serialize the v5 grasp training set to a TFRecord file.

    Loads ../results/grasp-dset/v5/train.pk (a list of per-example dicts)
    and writes one tf.train.Example per entry to train.tf.  Refuses to
    overwrite an existing record file.
    """
    data = ut.load('../results/grasp-dset/v5/train.pk')
    tf_file = '../results/grasp-dset/v5/train.tf'
    # Fail loudly rather than clobber a previously written record file.
    assert not os.path.exists(tf_file)

    # Feature builders are loop-invariant; define them once instead of
    # re-creating the lambdas on every iteration as the original did.
    def fbl(x):
        # x: already-compressed image bytes (stored verbatim).
        return tf.train.Feature(bytes_list=tf.train.BytesList(value=[x]))

    def fl(x):
        # x: numeric ndarray; flattened to a list of floats (py2 map -> list).
        return tf.train.Feature(float_list=tf.train.FloatList(
            value=map(float, x.flatten())))

    def il(x):
        # x: list of ints.
        return tf.train.Feature(int64_list=tf.train.Int64List(value=x))

    writer = tf.python_io.TFRecordWriter(tf_file)
    try:
        for d in ut.time_est(data):
            feat = {
                'gel0_pre': fbl(d['gel0_pre']),
                'gel1_pre': fbl(d['gel1_pre']),
                'gel0_post': fbl(d['gel0_post']),
                'gel1_post': fbl(d['gel1_post']),
                'im0_pre': fbl(d['im0_pre']),
                'im0_post': fbl(d['im0_post']),
                'im1_pre': fbl(d['im1_pre']),
                'im1_post': fbl(d['im1_post']),
                'depth0_pre': fl(d['depth0_pre']),
                'depth0_post': fl(d['depth0_post']),
                'end_effector': fl(d['end_effector']),
                'initial_press_prob': fl(d['initial_press_prob']),
                'is_gripping': il([d['is_gripping']])
            }
            ex = tf.train.Example(features=tf.train.Features(feature=feat))
            writer.write(ex.SerializeToString())
    finally:
        # Close the writer even if an example fails to serialize, so the
        # file handle (and any buffered records) are not leaked.
        writer.close()
# Example 2 (was stray extraction text: "Esempio n. 2" / "0")
def test(path):
    train_dir = pj(path, 'training')
    check_path = tf.train.latest_checkpoint(train_dir)
    print 'Restoring from:', check_path
    net = NetClf(check_path, gpu)
    data = ut.load(pj(path, 'test.pk'))

    labels = []
    probs = []
    accs = []
    for i in xrange(len(data)):
        ex = data[i]
        label = ex['is_gripping']
        ex = {k: ig.uncompress(ex[k]) for k in im_names}
        prob = net.predict(**ex)
        print prob, label
        pred = int(prob >= 0.5)

        labels.append(label)
        probs.append(prob)
        accs.append(pred == label)
    labels = np.array(labels, 'bool')
    probs = np.array(probs, 'float32')
    accs = np.array(accs)

    print 'Accuracy:', np.mean(accs)
    print 'mAP:', sklearn.metrics.average_precision_score(labels, probs)
def train_press(pr):
    """Fit a linear SVM using only the initial press probability as input.

    Reads train.pk from pr.dsdir and saves the fitted classifier to
    pr.resdir/clf.pk.
    """
    examples = ut.load(pj(pr.dsdir, 'train.pk'))
    # Each feature vector is the single press probability.
    xs = np.array([[e['initial_press_prob']] for e in examples], 'float32')
    ys = np.array([e['is_gripping'] for e in examples], 'int64')

    clf = sklearn.svm.SVC(C=1., kernel='linear')
    clf.fit(xs, ys)
    ut.save(pj(pr.resdir, 'clf.pk'), clf)
def eval_all(run=True, use_gpu=False,test_on_train=False):
  print """\\begin{tabular}"""
  for pr in all_params():
    if run:
      if use_gpu:
        grasp_net.test(pr, gpus[0], test_on_train = test_on_train)
      else:
        grasp_net.test(pr, None, test_on_train = test_on_train)
    out_file = pj(pr.resdir, 'eval_results.pk')
    if os.path.exists(out_file):
      r = ut.load(out_file)
      print '%s & %s%% & %s%% \\\\' % (pr.description, ut.f2(100*r['acc']), ut.f2(100*r['ap']))
  print """\\end{tabular}"""    
def train_clf(pr):
    """Train a StandardScaler + linear-SVM pipeline on hand-crafted features.

    Gel images are decompressed and center-cropped to 224 before feature
    extraction; the fitted pipeline is saved to pr.resdir/clf.pk.
    """
    examples = ut.load(pj(pr.dsdir, 'train.pk'))
    xs, ys = [], []
    for ex in examples:
        # Shallow copy so we can replace the gel entries without mutating
        # the loaded dataset.
        ex = copy.copy(ex)
        for key in ex.keys():
            if key.startswith('gel'):
                ex[key] = ut.crop_center(ig.uncompress(ex[key]), 224)
        xs.append(example_feats(ex, pr))
        ys.append(ex['is_gripping'])

    xs = np.array(xs, 'float32')
    ys = np.array(ys, 'int64')

    clf = sklearn.pipeline.Pipeline([
        ('scale',
         sklearn.preprocessing.StandardScaler(with_mean=True, with_std=True)),
        ('svm', sklearn.svm.SVC(C=1., kernel='linear'))
    ])
    clf.fit(xs, ys)
    ut.save(pj(pr.resdir, 'clf.pk'), clf)
def analyze(pr):
    eval_exs = ut.load(pj(pr.resdir, 'eval.pk'))
    # accuracy by object
    by_name = ut.accum_dict((ex.object_name, ex) for ex in eval_exs)
    accs, labels = [], []
    for name in by_name:
        exs = by_name[name]
        accs.append(np.mean(ut.mapattr(exs).acc))
        labels.append(np.mean(ut.mapattr(exs).label))
        print name, ut.f4(accs[-1]), ut.f4(labels[-1])
    print 'Object-averaged accuracy:', ut.f4(np.mean(accs))
    print 'Object-averaged base:', ut.f4(np.mean(labels))

    chosen = set()
    table = []
    for ex in sorted(exs, key=lambda x: x.prob)[::-1]:
        if ex.object_name not in chosen:
            chosen.add(ex.object_name)
            print ex.object_name
            row = vis_example(ex.db_file)
            row = ['Prob:', ex.prob, 'Label:', ex.label] + row
            table.append(row)
    ig.show(table, rows_per_page=25)
def test(pr, gpu, test_on_train=False, center_crop=True):
    [gpu] = set_gpus([gpu])

    if pr.inputs == ['press']:
        net = PressClf(pr)
    else:
        #check_path = tf.train.latest_checkpoint(pr.train_dir)
        check_path = pj(pr.train_dir, 'net.tf-%d' % pr.model_iter)
        print 'Restoring from:', check_path
        net = NetClf(pr, check_path, gpu)

    if test_on_train:
        print 'Testing on train!'
        data = ut.load(pj(pr.dsdir, 'train.pk'))
    else:
        data = ut.load(pj(pr.dsdir, 'test.pk'))

    labels, probs, accs, vals = [], [], [], []
    for i in xrange(len(data)):
        ex = data[i]
        label = ex['is_gripping']

        def load_im(k, v):
            if k.startswith('gel') or k.startswith('im'):
                im = ig.uncompress(v)
            elif k.startswith('depth'):
                #v = np.tile(v, (1, 1, 3))
                im = v.astype('float32')
            else:
                raise RuntimeError()
            if center_crop:
                im = ut.crop_center(im, 224)
            return im

        inputs = {k: load_im(k, ex[k]) for k in im_names}
        inputs['initial_press_prob'] = ex['initial_press_prob']
        inputs['ee'] = ex['end_effector']

        pred, prob = net.predict(**inputs)
        #print prob, pred, label
        labels.append(label)
        probs.append(prob)
        accs.append(pred == label)
        if i % 50 == 0:
            print 'running average acc:', ut.f3(np.mean(accs))
        vals.append(
            ut.Struct(label=label,
                      prob=prob,
                      acc=accs[-1],
                      idx=i,
                      db_file=ex['db_file'],
                      object_name=ex['object_name']))

    labels = np.array(labels, 'bool')
    probs = np.array(probs, 'float32')
    accs = np.array(accs)

    acc = np.mean(accs)
    ap = sklearn.metrics.average_precision_score(labels, probs)
    print 'Accuracy:', acc
    print 'mAP:', ap
    print 'Base rate:', ut.f3(
        np.array(ut.mapattr(vals).label).astype('float32').mean())

    ut.save(pj(pr.resdir, 'eval_results.pk'),
            dict(acc=acc, ap=ap, results=(labels, probs)))
    ut.save(pj(pr.resdir, 'eval.pk'), vals)
    return acc
 def __init__(self, pr):
     # Load the previously trained, pickled classifier from pr.resdir.
     # NOTE(review): the one-space indent suggests this is a method of a
     # classifier-wrapper class whose header lies outside this chunk —
     # presumably the PressClf used by test() above; confirm in full file.
     self.clf = ut.load(pj(pr.resdir, 'clf.pk'))
def test(pr, gpu, test_on_train=False, crop_type='center'):
    """Evaluate a classifier with single- or multi-crop test-time augmentation.

    Loads SVMClf or NetClf depending on pr.use_clf, runs it over every
    example in the test (or train) split, averages predictions across the
    crops produced per image, prints metrics, and saves eval_results.pk and
    eval.pk to pr.resdir.

    crop_type: 'center' for a single 224 center crop per image, or 'multi'
    for a 3x3 grid of crops (size `crop_dim` — a global defined outside
    this chunk; TODO confirm its value).
    """
    [gpu] = set_gpus([gpu])

    if ut.hastrue(pr, 'use_clf'):
        net = SVMClf(pr)
    else:
        #check_path = tf.train.latest_checkpoint(pr.train_dir)
        # Restore a specific training iteration rather than the latest.
        check_path = pj(pr.train_dir, 'net.tf-%d' % pr.model_iter)
        print 'Restoring from:', check_path
        net = NetClf(pr, check_path, gpu)

    if test_on_train:
        print 'Testing on train!'
        data = ut.load(pj(pr.dsdir, 'train.pk'))
    else:
        data = ut.load(pj(pr.dsdir, 'test.pk'))

    labels, probs, accs, vals = [], [], [], []
    for i in xrange(len(data)):
        ex = data[i]
        label = ex['is_gripping']

        def load_im(k, v):
            # Returns a list of crops for input key k; gel*/im* values are
            # compressed images, depth* values are raw arrays.
            if k.startswith('gel') or k.startswith('im'):
                im = ig.uncompress(v)
            elif k.startswith('depth'):
                #v = np.tile(v, (1, 1, 3))
                im = v.astype('float32')
            else:
                raise RuntimeError()

            if crop_type == 'center':
                crops = [ut.crop_center(im, 224)]
            elif crop_type == 'multi':
                # 3x3 grid of crop_dim-sized crops evenly spanning the image.
                crops = []
                dh = (im.shape[0] - crop_dim)
                num_dim_samples = 3
                for y in np.linspace(0, dh, num_dim_samples).astype('l'):
                    dw = (im.shape[1] - crop_dim)
                    for x in np.linspace(0, dw, num_dim_samples).astype('l'):
                        crops.append(im[y:y + crop_dim, x:x + crop_dim])
            # Shuffle deterministically, seeded per sensor prefix and example
            # index, so the j-th crop pairs consistently across keys that
            # share a prefix (e.g. gel0_pre/gel0_post).
            return ut.shuffled_with_seed(crops, k.split('_')[0] + str(i))

        all_inputs = {k: load_im(k, ex[k]) for k in im_names}
        ps = []
        # Average the network's probability over the crop ensemble.
        for j in xrange(len(all_inputs['gel0_pre'])):
            inputs = {k: all_inputs[k][j] for k in im_names}
            inputs['initial_press_prob'] = ex['initial_press_prob']
            inputs['ee'] = ex['end_effector']
            _, prob = net.predict(**inputs)
            ps.append(prob)
        prob = np.mean(ps)
        # Threshold with the classifier's own decision threshold.
        pred = int(prob >= net.thresh)

        print prob, pred, label
        labels.append(label)
        probs.append(prob)
        accs.append(pred == label)
        print 'running average acc:', np.mean(accs)
        vals.append(
            ut.Struct(label=label,
                      prob=prob,
                      acc=accs[-1],
                      idx=i,
                      db_file=ex['db_file'],
                      object_name=ex['object_name']))

    labels = np.array(labels, 'bool')
    probs = np.array(probs, 'float32')
    accs = np.array(accs)

    acc = np.mean(accs)
    ap = sklearn.metrics.average_precision_score(labels, probs)
    print 'Accuracy:', acc
    print 'mAP:', ap
    print 'Base rate:', ut.f3(
        np.array(ut.mapattr(vals).label).astype('float32').mean())

    ut.save(pj(pr.resdir, 'eval_results.pk'),
            dict(acc=acc, ap=ap, results=(labels, probs)))
    ut.save(pj(pr.resdir, 'eval.pk'), vals)
 def __init__(self, pr):
     # NOTE(review): one-space indent suggests a method of a classifier
     # wrapper class (presumably the SVMClf used above) whose header lies
     # outside this chunk — confirm in the full file.
     self.pr = pr
     # Pickled classifier produced by train_clf()/train_press().
     self.clf = ut.load(pj(pr.resdir, 'clf.pk'))
     # Decision threshold read by test() as net.thresh; 0. here, unlike the
     # 0.5 used by the probability-based NetClf path.
     self.thresh = 0.