Example #1
def load_FAUST_scan(dpmp, path, isTest):
    """
    Load a scan from the Faust dataset
    """

    if isTest:
        filename = path
    else:
        filename = path

    print 'loading ' + filename
    mym = myMesh(filename=filename)

    points = mym.v

    # Center data
    dpmp.scanCenter = np.mean(points, axis=0)
    points = points - dpmp.scanCenter

    # Build kdtree for likelihood computation
    from scipy.spatial import cKDTree
    dpmp.kdtree = cKDTree(points)
    dpmp.kdpoints = points

    # Store mesh and normals
    mym.v = points
    dpmp.scanMesh = mym
    dpmp.scanMeshNormals = mym.estimate_vertex_normals()
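
For context, a minimal usage sketch (not from the original repo): the SimpleNamespace stands in for the Dpmp object, the scan path is an invented placeholder, and only the attributes that load_FAUST_scan assigns are relied on.

from types import SimpleNamespace
import numpy as np

d = SimpleNamespace()
# Path below is a made-up example; point it at any FAUST scan ply
load_FAUST_scan(d, '/data/MPI-FAUST/test/scans/test_scan_000.ply', isTest=True)

# The kdtree enables fast nearest-neighbor queries against the centered scan
dist, idx = d.kdtree.query(np.zeros((1, 3)))
print('closest scan vertex:', d.kdpoints[idx[0]], 'at distance', dist[0])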
Example #2
def compute_3D_likelihood(dpmp, part, x, return_normals_cost=False):
    """
    Computes the likelihood for the particles x (dim * nParticles) of the part part
    """

    nParticles = x.shape[1]
    logL = np.zeros((nParticles))
    zScore = np.zeros((nParticles))  # pose-prior score term, currently disabled (stays zero)
    nScore = np.zeros((nParticles))  # normals-matching penalty
    if dpmp.likelihoodAlpha[part] == 0:
        if return_normals_cost:
            return dpmp.likelihoodAlpha[part]*logL, nScore
        return dpmp.likelihoodAlpha[part]*logL

    for p in range(nParticles):
        Pw = particle_to_points(dpmp, x[:,p], part)
        Pws = Pw[0::dpmp.resolStep,:]
        out = dpmp.kdtree.query(Pws)

        # Also have a cost for matching the normals
        if dpmp.compute_normals_cost:
            normals = dpmp.scanMeshNormals[out[1],:]
            mesh = myMesh(v=Pw, f=dpmp.body.partFaces[part])
            K = mesh.estimate_vertex_normals()
            L = K[0::dpmp.resolStep,:]
            # Clip dot products to [-1, 1] before arccos to avoid NaNs from round-off
            angle = np.arccos(np.clip(np.sum(normals*L, axis=1), -1.0, 1.0))
            # Penalize normals pointing in nearly opposite directions (arccos returns [0, pi])
            opp = np.where(angle > 3*np.pi/4)
            nScore[p] = -0.005*len(opp[0])

        # Robust distance term: mean of (d^2 + offset)^gamma over the sampled points
        logL[p] = -np.mean((out[0]**2 + dpmp.robustOffset)**dpmp.robustGamma)

    logL = logL + zScore + nScore
    if return_normals_cost:
        return dpmp.likelihoodAlpha[part]*logL, nScore
    else:
        return dpmp.likelihoodAlpha[part]*logL
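
To see why the exponent robustGamma < 1 makes this distance term robust, compare it against a plain squared loss on the same distances. The numbers below are purely illustrative assumptions, not values from the repo.

import numpy as np

dists = np.array([0.01, 0.02, 0.5])     # nearest-neighbor distances with one outlier
robustOffset, robustGamma = 1e-6, 0.45  # assumed values for illustration
print(-np.mean((dists**2 + robustOffset)**robustGamma))  # outlier tempered sub-quadratically
print(-np.mean(dists**2))                                # squared loss: the outlier dominates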
Example #3
def run(nParticles, nSteps, basePath, faustId, outputPath, isTest, params, code, seed, frameId, genderArgv, lastResult=None):
    np.random.seed(seed)
    nBtorso = 12
    nB = 5
    nBshape = 4

    gender = 'male'
    sbmModel = "model_ho_male_5_reduced"
    if genderArgv == 'female':
        gender = 'female'
        sbmModel = "model_ho_female_5_reduced"

    b = sbm.Sbm(basePath)
    b.gender = gender
    
    d = dpmp.Dpmp(b, nBtorso, nB, nBshape, nParticles, 0, sbmModel)

    d.body.fixedShape = False
    d.compute_normals_cost = True
    d.select_msg = True
    d.probRandomWalk = 0.5
    d.use_map_particle_for_rnd_walk = False
    d.LMsteps = 5  # effectively 4 steps
    d.init_torso_location = np.zeros((3))
    d.init_torso_rotation = np.zeros((3))
    d.init_with_global_rotation = True
    d.springSigma = 0 
    d.display = -1
    d.verbose = 1
    d.likelihoodType = '3Dlikelihood' 
    if d.verbose > 0:
        print('MODEL ' + sbmModel)
        print('GENDER ' + d.body.gender)

    # Inference parameters
    d.particle_genericSigma = params['genericSigma']
    d.particle_rSigma = params['rSigma']
    d.particle_tSigma = params['tSigma']
    d.particle_posePCAsigmaScale = params['posePCAsigmaScale']
    d.particle_shapePCAsigmaScale = params['shapePCAsigmaScale']
    d.robustOffset = params['robustOffset']
    d.robustGamma = params['robustGamma']
    l_alphaNormal = params['l_alphaNormal']
    l_alphaLoose = params['l_alphaLoose']
    l_alphaVeryLoose = params['l_alphaVeryLoose']
    s_alphaNormal = params['s_alphaNormal']
    s_alphaLoose = params['s_alphaLoose']
    s_alphaTight = params['s_alphaTight']
    alphaRef = params['alphaRef']

    # When to change parameters during inference
    if ADAPTIVE_WEIGHTS:
        fullModelStart = nSteps // 4
        refinementStart = 2*nSteps // 4
        greedyStart = 3*nSteps // 4
    else:
        fullModelStart = 1
        refinementStart = nSteps+1
        greedyStart = nSteps+1

    # Load one example to use as test data
    load_mesh.load_FAUST_scan(d, basePath+faustId, isTest)

    # Inference
    lower_parts = np.array([2, 0, 5, 10, 12, 4, 1, 15])
    logB = np.zeros((nSteps))

    if ADAPTIVE_WEIGHTS:
        d.likelihoodAlpha[:] = l_alphaNormal
        d.likelihoodAlpha[lower_parts] = l_alphaVeryLoose
        d.stitchAlpha = s_alphaNormal*np.ones((d.nNodes, d.nNodes))
        d.stitchAlpha[d.body.parts['ll_r'], d.body.parts['foot_r']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['foot_r'], d.body.parts['ll_r']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['ll_l'], d.body.parts['foot_l']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['foot_l'], d.body.parts['ll_l']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['la_r'], d.body.parts['hand_r']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['hand_r'], d.body.parts['la_r']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['la_l'], d.body.parts['hand_l']] = s_alphaLoose
        d.stitchAlpha[d.body.parts['hand_l'], d.body.parts['la_l']] = s_alphaLoose

        d.stitchAlpha[d.body.parts['ul_r'], d.body.parts['torso']] = s_alphaTight
        d.stitchAlpha[d.body.parts['torso'], d.body.parts['ul_r']] = s_alphaTight
        d.stitchAlpha[d.body.parts['ul_l'], d.body.parts['torso']] = s_alphaTight
        d.stitchAlpha[d.body.parts['torso'], d.body.parts['ul_l']] = s_alphaTight
    else:
        d.likelihoodAlpha[:] = l_alphaNormal
        d.stitchAlpha = s_alphaNormal*np.ones((d.nNodes, d.nNodes))

    d.nSteps = nSteps 
    for s in range(nSteps):
        d.step = s
        if ADAPTIVE_WEIGHTS:
            if s == fullModelStart:
                d.likelihoodAlpha[lower_parts] = l_alphaNormal
                d.likelihoodAlpha[d.body.parts['hand_r']] = l_alphaLoose
                d.likelihoodAlpha[d.body.parts['hand_l']] = l_alphaLoose
                d.likelihoodAlpha[d.body.parts['foot_r']] = l_alphaLoose
                d.likelihoodAlpha[d.body.parts['foot_l']] = l_alphaLoose
                d.stitchAlpha = s_alphaNormal*np.ones((d.nNodes, d.nNodes))
                d.stitchAlpha[d.body.parts['ul_r'], d.body.parts['torso']] = s_alphaTight
                d.stitchAlpha[d.body.parts['torso'], d.body.parts['ul_r']] = s_alphaTight
                d.stitchAlpha[d.body.parts['ul_l'], d.body.parts['torso']] = s_alphaTight
                d.stitchAlpha[d.body.parts['torso'], d.body.parts['ul_l']] = s_alphaTight
                # Recompute the particle likelihoods since the weights have changed
                d.compute_normals_cost = True
                for v in d.nodeIdx:
                    new_L = particles.compute_likelihood(d, v, d.b[v]['x'])
                    d.b[v]['L'] = new_L.copy()

            # Refinement
            if s == refinementStart:
                d.particle_genericSigma = alphaRef*d.particle_genericSigma
                d.particle_rSigma = alphaRef*d.particle_rSigma
                d.particle_tSigma = alphaRef*d.particle_tSigma
                d.particle_posePCAsigmaScale = alphaRef*d.particle_posePCAsigmaScale
                d.particle_shapePCAsigmaScale = alphaRef*d.particle_shapePCAsigmaScale
                d.stitchAlpha = s_alphaTight*np.ones((d.nNodes, d.nNodes))

            # Greedy resampling around the best solution
            if s == greedyStart:
                d.select_msg = False # Use m-best instead
                d.probRandomWalk = 1.0
                d.use_map_particle_for_rnd_walk = True

        # Run the DPMP step on every iteration, not only under ADAPTIVE_WEIGHTS
        tic = time.time()
        logB[s] = run_DPMP_step(d, s, frameId, lastResult)
        toc = time.time() - tic

        # if d.display == 4:
        #     show_all_particles(d, faustId, nParticles, s)

        if d.verbose > 0:
            logPos, logL, logP = compute_model_log_posterior(d)
            print('iter ' + str(s) + ' time ' + str(toc) + 's logPos= ' + str(logPos)
                  + ' logL= ' + str(logL) + ' logP= ' + str(logP))


    # Show the final solution
    if d.display > 0:
        filename = code + 'faustID_' + faustId + '_' + str(seed) + '.png'
        ba.show_me(d.body, dbstop=False, scan=d.scanMesh, filename=filename)

    if d.verbose > 0:
        print('negative energy at each iteration:')
        print(logB)

    # Save the result as a single mesh
    v, f, joints, skeleton = ba.sbm_to_scape_mesh(d.body, d.scanCenter)
    mesh_data = {'v':v, 'f':f} 

    filename = basePath + outputPath + '.pkl'
    dpmp.save_dpmp(d, logB, params, mesh_data, filename)
    dpmp.show_result(filename)

    # Save in ply
    from my_mesh.mesh import myMesh
    m = myMesh(v=v, f=f, e=[])
    filename = basePath+outputPath + '.ply'
    m.save_ply(filename)

    # Save the skeleton as a separate mesh
    m = myMesh(v=joints, f=[], e=skeleton)
    filename = basePath + outputPath + '_skeleton.ply'
    m.save_ply(filename)

    # Save landmark file
    filename = basePath + outputPath + '.lnd'
    m.save_lnd(filename)

    return d
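
A hypothetical driver for run; every path, id, and parameter value below is an assumption chosen only to show the expected shapes of the arguments.

params = {'genericSigma': 0.01, 'rSigma': 0.1, 'tSigma': 0.03,
          'posePCAsigmaScale': 0.5, 'shapePCAsigmaScale': 0.5,
          'robustOffset': 1e-6, 'robustGamma': 0.45,
          'l_alphaNormal': 1.0, 'l_alphaLoose': 0.4, 'l_alphaVeryLoose': 0.1,
          's_alphaNormal': 1.0, 's_alphaLoose': 0.5, 's_alphaTight': 4.0,
          'alphaRef': 0.5}
d = run(nParticles=30, nSteps=40, basePath='/data/FAUST/', faustId='000',
        outputPath='results/fit_000', isTest=True, params=params,
        code='demo', seed=0, frameId=0, genderArgv='male')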
Example #4
'''
    This is a short demo showing how to load and use the SMAL model.
    Please read the README.txt file for requirements.
'''

from smpl_webuser.serialization import load_model
from my_mesh.mesh import myMesh
import pickle as pkl
import numpy as np
# Load the smal model
model_path = 'smal_CVPR2017.pkl'
model = load_model(model_path)

# Save the mean model
m = myMesh(v=model.r, f=model.f)
m.save_ply('smal_mean_shape.ply')
print('saved mean shape')

# Load the family clusters data (see paper for details)
# and save the mean per-family shape
# 0-felidae(cats); 1-canidae(dogs); 2-equidae(horses);
# 3-bovidae(cows); 4-hippopotamidae(hippos);
# The clusters are over the shape coefficients (betas);
# setting different betas changes the shape of the model
model_data_path = 'smal_CVPR2017_data.pkl'
with open(model_data_path, 'rb') as f:
    data = pkl.load(f, encoding='latin1')  # latin1 decodes NumPy pickles written under Python 2
print(data['cluster_cov'])

for i, betas in enumerate(data['cluster_means']):
    if not (i == 4):
        # Set the family mean shape coefficients and save the mean mesh
        model.betas[:] = betas
        m = myMesh(v=model.r, f=model.f)
        m.save_ply('family_' + str(i) + '.ply')  # output name is illustrative
        print('saved family ' + str(i))
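
Because each family cluster is a Gaussian over the betas, novel within-family shapes can be sampled as well. A minimal sketch, assuming data['cluster_cov'][i] holds the covariance of family i's betas; the seed and output filename are illustrative.

rng = np.random.default_rng(0)
fam = 0  # felidae (cats)
sampled_betas = rng.multivariate_normal(data['cluster_means'][fam],
                                        data['cluster_cov'][fam])
model.betas[:] = sampled_betas
myMesh(v=model.r, f=model.f).save_ply('sampled_felidae.ply')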