'''
Created on Jun 14, 2013
MCMC Sampler script for AoMR Simple Shape Grammar
@author: goker
'''

import numpy as np
from aomr_simple_grammar import AoMRSimpleShapeState, AoMRSimpleSpatialModel
from vision_forward_model import VisionForwardModel
from mcmc_sampler import MCMCSampler

if __name__ == '__main__':
    # AoMR Simple Shape Grammar, visual condition
    spatial_model = AoMRSimpleSpatialModel()
    forward_model = VisionForwardModel()
    data = np.load('data/visual/1.npy')
    state_params = {'b': 750.0}
    init_state = AoMRSimpleShapeState(forward_model, data, state_params, spatial_model)
    sampler_params = {'info': 'AoMR Simple Grammar - Visual Condition',
                      'runs': 1,
                      'iters': 5000,
                      'keep_top_n': 20,
                      'burn_in': 1000,
                      'thinning_period': 400,
                      'random_move': False,
                      'results_folder': './',
                      'save_results': True,
                      'verbose': True}
    ms = MCMCSampler(sampler_params, init_state)
    results = ms.run()
    print(results)
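    # Optional quick look at the kept samples. This is only a sketch: it assumes
    # run() returns a results object exposing the same best_samples list that the
    # loader script below reads back from a pickled .mcmc file.
    if hasattr(results, 'best_samples'):
        for sample in results.best_samples[0]:
            print(sample.posterior)
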
'''
Script for loading and inspecting saved MCMC sampling results
@author: goker
'''
import cPickle as pickle
from mcmc_sampler import *
from vision_forward_model import VisionForwardModel

# load results
fname = "./results/AoMRShapeGrammar_Visual_Obj820130624004057.mcmc"
with open(fname, 'rb') as f:
    results = pickle.load(f)
# print results
best1 = results.best_samples[0][0].state
print(best1)
best1.tree.show()

forward_model = VisionForwardModel()
i = 0
for sample in results.best_samples[0]:
    # sample.state.tree.show()
    print(sample.posterior)
    forward_model._view(sample.state)
    i += 1

# load results
fname = "./results/AoMRShapeGrammar_Visual_Obj220130624001257.mcmc"
with open(fname, 'rb') as f:
    results = pickle.load(f)
# print results
best2 = results.best_samples[0][0].state
print(best2)
best2.tree.show()
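
# The two best states loaded above can also be rendered with the forward model;
# this simply reuses the _view call from the loop above as an optional visual check.
forward_model._view(best1)
forward_model._view(best2)
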
'''
Created on Jun 17, 2013
AoMRShapeGrammar class tests
@author: goker
'''
from treelib import Tree
from pcfg_tree import ParseNode
from aomr_grammar import AoMRShapeState, AoMRSpatialModel
from vision_forward_model import VisionForwardModel
from haptics_forward_model import HapticsForwardModel
import numpy as np
import pandas as pd

if __name__ == '__main__':
    data = np.load('data/visual/2.npy')
    params = {'b': 9000.0}
    forward_model = VisionForwardModel()
    #h_forward_model = HapticsForwardModel(body_fixed=False)
    
    part1 = 'Bottom0'
    part2 = 'Front0'
    part3 = 'Top0'
    part4 = 'Ear1'
    # wrong parts for this object
    wpart1 = 'Bottom1'
    wpart2 = 'Front1'
    wpart3 = 'Top1'
    wpart4 = 'Ear0'
    
    
    # ======================================================================
    # TEST TREES: We look at the prior, likelihood and acceptance probabilities for
        
    def __str__(self):
        return "".join(self.tree[node].tag.symbol + repr(self.spatial_model.positions[node])
                       for node in self.tree.expand_tree(mode=Tree.DEPTH)
                       if self.tree[node].tag.symbol in self.grammar.terminals and
                          self.tree[node].tag.symbol != 'Null')
        

if __name__ == '__main__':
    # best b values: for visual 750; for haptics:
    #   object 1: 5000
    #   object 2: 9000
    #   object 3: 5000 (maybe lower)
    #   object 4: 6000
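    # The same values as a lookup table, purely a convenience restatement of the
    # comment above (the original script does not define this):
    best_b = {'visual': 750.0,
              'haptic': {1: 5000.0, 2: 9000.0, 3: 5000.0, 4: 6000.0}}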
    data = np.load('data/visual/1.npy')
    params = {'b': 750.0}
    forward_model = VisionForwardModel()
    #forward_model = HapticsForwardModel()
    
    # TEST TREES: We look at the prior, likelihood and acceptance probabilities for
    # empty tree, correct configuration (4 parts in correct positions) and tree with 
    # 1 part (ear) removed. Our purpose is to understand the b value we should set
    # to make sure correct configuration has the highest posterior.
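    # For reference, the acceptance probability mentioned above: under
    # Metropolis-Hastings with a symmetric proposal, a move from state x to x' is
    # accepted with probability min(1, posterior(x') / posterior(x)). The helper
    # below is a hypothetical illustration, not part of AoMRSimpleShapeState.
    def acceptance_probability(posterior_current, posterior_proposed):
        # both arguments are unnormalized (non-log) posterior probabilities
        return min(1.0, posterior_proposed / posterior_current)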
      
    # Tree with no parts
    t1 = Tree()
    t1.create_node(ParseNode('S', 1), identifier='S')
    t1.create_node(ParseNode('Null', ''), parent='S')
     
    spatial_model1 = AoMRSimpleSpatialModel()
     
    rrs = AoMRSimpleShapeState(forward_model=forward_model, data=data, 
import numpy as np
from aomr_grammar import AoMRShapeState, AoMRSpatialModel
from vision_forward_model import VisionForwardModel
from mcmc_sampler import MCMCSampler
import sys
from haptics_forward_model import HapticsForwardModel

if __name__ == '__main__':
    # AoMR Shape Grammar; object id, b value and forward model type come from the command line
    obj_id = int(sys.argv[1])
    b = float(sys.argv[2])
    forward_model_type = sys.argv[3]  # 'v', 'h' or 'vh'
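    # Example invocation (the script filename here is hypothetical):
    #   python run_aomr_sampler.py 2 9000.0 v
    # i.e. object id 2, b = 9000.0, visual forward model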
    spatial_model = AoMRSpatialModel()
    if forward_model_type == 'v':
        forward_model = VisionForwardModel(body_fixed=False)
        npy_path = 'data/visual'
        info = 'AoMRShapeGrammar Visual Obj{0:d}'.format(obj_id)
    elif forward_model_type == 'h':
        forward_model = HapticsForwardModel(body_fixed=False)
        npy_path = 'data/haptic'
        info = 'AoMRShapeGrammar Haptic Obj{0:d}'.format(obj_id)
    elif forward_model_type == 'vh':
        raise NotImplementedError()
    else:
        raise ValueError("Forward model type must be 'v', 'h' or 'vh'")

    data = np.load('{0:s}/{1:d}.npy'.format(npy_path, obj_id))

    # visual condition
    # b parameter for objects