Example #1
def init_matlab_toolbox(matlab_cobra_path=None):
    """initialize the matlab cobra toolbox, and load its functions
    into mlab's namespace (very useful for ipython tab completion)

    matlab_cobra_path: the path to the directory containing the MATLAB
    cobra installation. Using the default None will attempt to find the
    toolbox in the MATLAB path"""
    if matlab_cobra_path is None:
        matlab_cobra_path, tmp = os.path.split(
            cobra.matlab.which('initCobraToolbox'))
    if not os.path.isfile(os.path.join(matlab_cobra_path, "initCobraToolbox.m")):
        print "initCobraToolbox not found in given path"
        return
    # store the current directory so we can return to it
    curdir = os.path.abspath(os.curdir)
    # if the user has a pathdef file, it will not get used by the MATLAB
    # engine, so it needs to be called manually
    try:
        os.chdir(matlab_home)
        matlab.path(matlab.pathdef())
    except:
        pass
    os.chdir(matlab_cobra_path)
    matlab.initCobraToolbox()
    os.chdir(curdir)
    
    # discover MATLAB functions
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        os.path.walk(matlab_cobra_path, _probe, None)
        os.path.walk(mlab_path, _probe, None)
        for function in _useful_matlab_functions:
            # touching the attribute makes mlabwrap build the proxy and
            # fetch its MATLAB help text as the docstring
            exec("matlab.%s.__doc__" % function)
Example #2
def add_path(path):
    from mlabwrap import mlab
    path_str = mlab.path(nout=1)
    paths = path_str.split(';')
    for p in paths:
        print p
    if path not in paths:
        mlab.path(path_str, path)
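A short usage sketch for add_path (the directory is hypothetical). Note that MATLAB separates path entries with ';' on Windows and ':' on Linux/macOS, so the split above assumes a Windows-style path string:

add_path('/home/user/my_matlab_toolbox')  # appended only if not already on the path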
Example #3
def testForSource(Ai, Aj, Av, Gcollab_base, feature_graphs, src, beta):
	# Get the features from G for the given source node
	features = getFeatures.get_walk_features(Gcollab_base, feature_graphs, src)
	newAi = list(Ai)
	newAj = list(Aj)
	# convert nodeorder into a dict for mapping
	# mapping[oldID]= newID
	# nodeorder[newID-1]= oldID
	# newID starts from 1, and not 0

	mapping = {}
	rmap = {}
	
	cnt = 1
	flat_features = []

	for tup, feature in features.iteritems():
		v = tup[1]
		mapping[v] = cnt
		rmap[cnt] = v

		flat_features.append(map(float, feature))
		cnt+=1
	
	for i in range(len(Ai)):
		newAi[i]= mapping[Ai[i]]
		newAj[i]= mapping[Aj[i]]
	
	newSrcID = mapping[src]

	# build the MATLAB sparse representation from the remapped adjacency lists
	np_Ai, np_Aj, np_Av = interface.sparsePyToMat(newAi, newAj, Av)

	np_F = np.array(flat_features)
	mlab.path(mlab.path(), "learning/")

	#print np_Ai, np_Aj, np_Av, np_F, newSrcID, beta

	np_newIDs= mlab.runRW(np_Ai, np_Aj, np_Av, np_F, newSrcID, beta)
	newIDs= np_newIDs.tolist()

	# convert newIDs to original IDs
	origIDs= []

	for new_id in newIDs:
		origIDs.append(rmap[new_id[0]])

	return origIDs
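A tiny worked illustration (made-up IDs) of the remapping above: original node IDs are packed into consecutive 1-based IDs for MATLAB, and rmap maps the returned IDs back.

mapping = {10: 1, 42: 2}   # oldID -> newID
rmap = {1: 10, 2: 42}      # newID -> oldID
assert rmap[mapping[42]] == 42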
Example #4
def get_eval_result():
    '''
        Invoke the MATLAB WIDER evaluation script (this overwrites any previous
        result), load the resulting .mat files, and return them.
    '''

    import matlab.engine  # package install see: https://www.mathworks.com/help/matlab/matlab_external/install-the-matlab-engine-for-python.html
    from mlabwrap import mlab  # package install see: http://mlabwrap.sourceforge.net/#installation
    # make sure 'matlab' executable is in your $PATH
    single, _ = make_dirs(mat_bak=True)
    current_pwd = os.getcwd()
    print "Current location: ", current_pwd
    if not DEBUG: mlab.path(mlab.path(), eval_root)
    os.chdir(eval_root)
    print "Executing matlab, Please wait..."
    if not DEBUG: mlab.my_wider_eval('./val')
    result_dir = os.path.join(os.getcwd(),
                              'plot/baselines/Val/setting_int/Ours')
    os.chdir(current_pwd)
    D = load_result_mat(result_dir, single)
    return D, current_pwd
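Hypothetical usage sketch; it assumes make_dirs, eval_root, DEBUG and load_result_mat are defined at module level, as in the original file:

results, start_dir = get_eval_result()  # results: loaded .mat data, start_dir: original working directory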
Example #5
def main(args):
    db = args[0]
    date1 = args[1]
    date2 = args[2]
    date3 = args[3]
    k = int(args[4])
    basename = args[5]

    reader = DBReader(db)
    print("Getting uid")
    uid = reader.uid()

    print("Getting all the feature graphs")
    feature_graphs = graphutils.get_feat_graphs(db, uid, None, date2)

    print("Getting Gcollab_delta graph")
    Gcollab_delta = graphutils.get_collab_graph(db, uid, date1, date2)
    Gcollab_base = graphutils.get_collab_graph(db, uid, date3, date1)

    base_graphs = graphutils.get_base_dict(Gcollab_base, feature_graphs)
    graphutils.print_stats(base_graphs)
    graphutils.print_graph_stats("Gcollab_delta", Gcollab_delta)

    filepath = os.path.join(LEARNING_ROOT, basename + ".mat")
    features_matrix_name = "%s_%s" % (basename, FEATURES)
    labels_matrix_name = "%s_%s" % (basename, LABELS)

    features = consolidateFeatures.consolidate_features_add(
        base_graphs, k, Gcollab_delta)
    #features = consolidateFeatures.consolidate_features(base_graphs, Gcollab_delta, k)
    labels = consolidateFeatures.consolidate_labels(features, Gcollab_delta)

    np_train, np_output = interface.matwrapTrain(features, labels)
    interface.writeTrain(np_train, np_output, filepath, features_matrix_name,
                         labels_matrix_name)

    # Add learning root to mlab path so that all .m functions are available as mlab attributes
    mlab.path(mlab.path(), LEARNING_ROOT)
    mlab.training(np_train, np_output)
Example #6
def main(args):
	db = args[0]
	date1 = args[1]
	date2 = args[2]
	date3 = args[3]
	k = int(args[4])
	basename = args[5]

	reader = DBReader(db)
	print("Getting uid")
	uid = reader.uid()

	print("Getting all the feature graphs")
	feature_graphs = graphutils.get_feat_graphs(db, uid, None, date2)

	print("Getting Gcollab_delta graph")
	Gcollab_delta = graphutils.get_collab_graph(db, uid, date1, date2)
	Gcollab_base = graphutils.get_collab_graph(db, uid, date3, date1)

	base_graphs = graphutils.get_base_dict(Gcollab_base, feature_graphs)
	graphutils.print_stats(base_graphs)
	graphutils.print_graph_stats("Gcollab_delta", Gcollab_delta)

	filepath = os.path.join(LEARNING_ROOT, basename + ".mat")
	features_matrix_name = "%s_%s"%(basename, FEATURES)
	labels_matrix_name = "%s_%s"%(basename, LABELS)

	features = consolidateFeatures.consolidate_features_add(base_graphs, k, Gcollab_delta)
	#features = consolidateFeatures.consolidate_features(base_graphs, Gcollab_delta, k)
	labels = consolidateFeatures.consolidate_labels(features, Gcollab_delta)

	np_train, np_output = interface.matwrapTrain(features, labels)
	interface.writeTrain(np_train, np_output, filepath, features_matrix_name, labels_matrix_name)	
	
	# Add learning root to mlab path so that all .m functions are available as mlab attributes
	mlab.path(mlab.path(), LEARNING_ROOT)	
	mlab.training(np_train, np_output)
Example #7
diff0 = (pred[:, :, :, 0].flatten() - targets_test[:, 0])
diff1 = (pred[:, :, :, 1].flatten() - targets_test[:, 1])
diff2 = (pred[:, :, :, 2].flatten() - targets_test[:, 2])
err1 = np.mean(diff0 * diff0)
err2 = np.mean(diff1 * diff1)
err3 = np.mean(diff2 * diff2)
error = (err1 + err2 + err3)/3

print "\nMean square error: {}".format(error)

print "\n\nDISPLAYING\n----------"



from mlabwrap import mlab
seunglab = "/home/luke/Documents/masters/code/seunglab"
matlabpath = "/home/luke/neuron-forests/matlab"
mlab.path(mlab.path(), seunglab + "/vis")
mlab.path(mlab.path(), seunglab + "/segmentation")
mlab.path(mlab.path(), matlabpath + "/vis")
# mlab.BrowseComponents('ii', targets_test.reshape(pred.shape).astype(float), pred)
mlab.vis2(im[tuple(idxs_test)].reshape((pred.shape[0], pred.shape[1], pred.shape[2])),
         targets_test.reshape(pred.shape).astype(float), pred, 0.85)

# mlab.path(mlab.path(), seunglab + "/analysis")
# mlab.plot_rand_error(pred, targets_test.reshape(pred.shape).astype(float))
Example #8
import os
from mlabwrap import mlab
mlab.path(mlab.path(), '../matlab')
hog = mlab.get_HOG('../seed_patches/1970393557375786_185_178_112_168.jpg')
print hog
Example #9
def add_path(path):
    from mlabwrap import mlab
    path_str = mlab.path(nout=1)
    paths = path_str.split(';')
    if path not in paths:
        mlab.path(path_str, path)
import numpy as np
from mlabwrap import mlab
import pickle
#from motion import *
from python_exp2xyz import exp2xyz
import theano
import scipy.io
import pdb

motion_path = "/afs/cs.stanford.edu/u/barak/Workspace/human-motion-modeling/crbm_matlab/Motion/"
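# put the Motion/ MATLAB code on the path so the helper .m functions called below can be found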
mlab.path(mlab.path(), motion_path)

def preprocess(n1, activities_file):
    Motion = mlab.preprocess(n1, activities_file)
    batchdata, seqlen, data_mean, data_std, offsets = _preprocess2_matlab(Motion)

    #shared_x = theano.shared(np.asarray(batchdata, dtype=theano.config.floatX))

    return batchdata, seqlen, data_mean, data_std, offsets

def postprocess(visible, data_std, data_mean, offsets):
    newdata = mlab.postprocess(visible, data_std, data_mean, offsets)

    return newdata

def get_joi(x_t, joi, data_mean, data_std, offsets):
    # each joint of interest occupies 6 consecutive columns, so expand the
    # joint indices into the corresponding column indices
    joi_indices = reduce(lambda x, y: x + y, map(lambda x: range(6 * x, 6 * (x + 1)), joi))

    return postprocess(x_t, data_std.reshape((1, -1)), data_mean.reshape((1, -1)), offsets)[:, joi_indices]

def final_frame(z_t, final_frame_lookahead):