def saveVarsInMat(filename, varNamesStr, outOf=None, **opts):
    """Hacky convenience function that dumps a handful of python variables
    into a .mat file.  See `awmstools.saveVars`.
    """
    from mlabwrap import mlab
    filename, varnames, outOf = __saveVarsHelper(
        filename, varNamesStr, outOf, '.mat', **opts)
    # The quoted, comma-separated variable list is needed twice below.
    joined = "', '".join(varnames)
    try:
        # Copy each variable into the MATLAB workspace, then save them all.
        for name in varnames:
            mlab._set(name, outOf[name])
        mlab._do("save('%s','%s')" % (filename, joined), nout=0)
    finally:
        assert varnames
        # Leave the MATLAB workspace as we found it.
        mlab._do("clear('%s')" % joined, nout=0)
def saveVarsInMat(filename, varNamesStr, outOf=None, **opts):
    """Hacky convenience helper: write some python variables out to a
    .mat file.  See `awmstools.saveVars`.
    """
    from mlabwrap import mlab
    filename, varnames, outOf = __saveVarsHelper(filename, varNamesStr,
                                                 outOf, '.mat', **opts)
    try:
        # Push every requested variable into the MATLAB workspace ...
        for v in varnames:
            mlab._set(v, outOf[v])
        # ... and have MATLAB write them all to the target file.
        mlab._do("save('%s','%s')" % (filename, "', '".join(varnames)),
                 nout=0)
    finally:
        assert varnames
        # Always tidy the MATLAB workspace up again.
        mlab._do("clear('%s')" % "', '".join(varnames), nout=0)
def dumpPathsToMatlab(self, fileName, **trackOptions): self.track(**trackOptions) from mlabwrap import mlab allPaths = [] for info in self.infos: try: paths = self._loadPaths(info) except: continue print info.uniqueName frames = _makeCells([v.array for v in paths.velocities], mlab) allPaths.append(mlab.struct("info", _infoToMatlab(paths.info, mlab), "frames", _wrapCellValue(frames))) mlab._set("paths", _toStructArray(allPaths)) mlab.save(fileName, "paths")
def dumpPathsToMatlab(self, fileName, **trackOptions): self.track(**trackOptions) from mlabwrap import mlab allPaths = [] for info in self.infos: try: paths = self._loadPaths(info) except: continue print info.uniqueName frames = _makeCells([v.array for v in paths.velocities], mlab) allPaths.append( mlab.struct("info", _infoToMatlab(paths.info, mlab), "frames", _wrapCellValue(frames))) mlab._set("paths", _toStructArray(allPaths)) mlab.save(fileName, "paths")
def dumpCorrelationToMatlab(self, correlation, fileName, **analysisOptions):
    """Run the `correlation` analysis, organize the results by group and
    save them to `fileName` as a MATLAB struct array named after
    `correlation`.

    Extra keyword arguments are forwarded to `self._performAnalysis`.
    """
    results = self._performAnalysis(correlation, **analysisOptions)
    results = self._organizeByGroup(results)
    from mlabwrap import mlab
    groups = []
    for group, correlations in results.iteritems():
        wrapped = []
        # enumerate() index was unused -> plain iteration.
        for data in correlations:
            info = _infoToMatlab(data.info, mlab)
            points = np.array(data.points)
            wrapped.append(mlab.struct("info", info, "points", points,
                                       "name", str(data.name)))
        groups.append(mlab.struct("group", group,
                                  "correlations", _toStructArray(wrapped)))
    mlab._set(correlation, _toStructArray(groups))
    mlab.save(fileName, correlation)
def dumpCorrelationToMatlab(self, correlation, fileName, **analysisOptions):
    """Perform the `correlation` analysis and write the grouped results to
    `fileName` as a MATLAB struct array named after `correlation`.

    `analysisOptions` is passed through to `self._performAnalysis`.
    """
    results = self._performAnalysis(correlation, **analysisOptions)
    results = self._organizeByGroup(results)
    from mlabwrap import mlab
    groups = []
    for group, correlations in results.iteritems():
        wrapped = []
        # The enumerate() index was never used; iterate directly.
        for data in correlations:
            info = _infoToMatlab(data.info, mlab)
            points = np.array(data.points)
            wrapped.append(
                mlab.struct("info", info, "points", points,
                            "name", str(data.name)))
        groups.append(
            mlab.struct("group", group,
                        "correlations", _toStructArray(wrapped)))
    mlab._set(correlation, _toStructArray(groups))
    mlab.save(fileName, correlation)
# A crude memory-leak smoke test.
# Run with ``python EatMem.py`` and watch memory consumption with a tool
# such as ``top``; step-wise growth while the program runs would suggest
# a leak.
import gc
from mlabwrap import mlab
try:
    import numpy
    from numpy.random import rand, randn
    toscalar = lambda a: a.item()
except ImportError:
    # Fall back to the ancient Numeric package.
    import Numeric as numpy
    from MLab import rand, randn
    toscalar = lambda a: a.toscalar()
import time
#gc.set_debug(gc.DEBUG_LEAK)
myMat = numpy.ones((1000**2, 2), 'd')
mlab._set('mymat', myMat)
raw_input("Press return when ready!")
for _ in range(10):
    mlab.sum(myMat, nout=0)
for _ in range(1000):
    s = mlab.sin(rand(100, 100))
# print loads of crap to make sure this won't screw up
for _ in range(20):
    mlab.sin(rand(100, 100), nout=0)
# The round-tripped matrix must come back unchanged.
assert numpy.alltrue(myMat.flat == mlab._get('mymat', 1).flat)
gc.collect()
raw_input("Press return to finish")
# Primitive memory-leak check: run with ``python EatMem.py`` and observe
# memory usage with e.g. ``top``; if consumption climbs in steps while
# the program runs, that would hint at a leak.
import gc
from mlabwrap import mlab
try:
    import numpy
    from numpy.random import rand, randn
    toscalar = lambda a: a.item()
except ImportError:
    # Legacy fallback to the old Numeric package.
    import Numeric as numpy
    from MLab import rand, randn
    toscalar = lambda a: a.toscalar()
import time
#gc.set_debug(gc.DEBUG_LEAK)
myMat = numpy.ones((1000**2, 2), 'd')
mlab._set('mymat', myMat)
input("Press return when ready!")
for _ in range(10):
    mlab.sum(myMat, nout=0)
for _ in range(1000):
    s = mlab.sin(rand(100, 100))
# print loads of crap to make sure this won't screw up
for _ in range(20):
    mlab.sin(rand(100, 100), nout=0)
# Verify the matrix survives the MATLAB round trip unchanged.
assert numpy.alltrue(myMat.flat == mlab._get('mymat', 1).flat)
gc.collect()
input("Press return to finish")