import copy
import os
import time
from pprint import pprint
from random import shuffle

import numpy as np

# OPF experiment helpers; this module path is the one used by the older
# NuPIC releases that still expose opfhelpers.loadExperiment.
from nupic.frameworks.opf import opfhelpers


def testAll(experiments):
  # The experiment directories live in the parent directory of this script.
  experimentsDir = os.path.split(os.path.dirname(__file__))[0]
  for experiment in experiments:
    experimentBase = os.path.join(os.getcwd(), experimentsDir, experiment)
    config, control = opfhelpers.loadExperiment(experimentBase)
    # The dataset URI lives in a different place depending on whether the
    # experiment uses the task-based control format or the simple one.
    if control['environment'] == 'opfExperiment':
      experimentTasks = control['tasks']
      task = experimentTasks[0]
      datasetURI = task['dataset']['streams'][0]['source']
    elif control['environment'] == 'nupic':
      datasetURI = control['dataset']['streams'][0]['source']
    metricSpecs = control['metrics']
    # Strip the "file://" scheme to get a plain filesystem path.
    datasetPath = datasetURI[len("file://"):]
    # Sweep the column count from 1024 to 2048 in steps of 128.
    for i in xrange(1024, 2176, 128):
      #config['modelParams']['tmParams']['cellsPerColumn'] = 16
      config['modelParams']['tmParams']['columnCount'] = i
      config['modelParams']['spParams']['columnCount'] = i
      print 'Running with 32 cells per column and %i columns.' % i
      start = time.time()
      # runOneExperiment is assumed to be defined elsewhere in this module.
      result = runOneExperiment(config, control['inferenceArgs'],
                                metricSpecs, datasetPath)
      print 'Total time: %d.' % (time.time() - start)
      pprint(result)

def testAll(experiments):
  experimentsDir = os.path.join(os.path.split(
      os.path.dirname(__file__))[:-1])[0]
  for experiment in experiments:
    experimentBase = os.path.join(os.getcwd(), experimentsDir, experiment)
    config, control = opfhelpers.loadExperiment(experimentBase)
    if control['environment'] == 'opfExperiment':
      experimentTasks = control['tasks']
      task = experimentTasks[0]
      datasetURI = task['dataset']['streams'][0]['source']
    elif control['environment'] == 'nupic':
      datasetURI = control['dataset']['streams'][0]['source']
    metricSpecs = control['metrics']
    datasetPath = datasetURI[len("file://"):]
    for i in xrange(1024, 2176, 128):
      #config['modelParams']['tpParams']['cellsPerColumn'] = 16
      config['modelParams']['tpParams']['columnCount'] = i
      config['modelParams']['spParams']['columnCount'] = i
      print 'Running with 32 cells per column and {0:d} columns.'.format(i)
      start = time.time()
      result = runOneExperiment(config, control['inferenceArgs'],
                                metricSpecs, datasetPath)
      print 'Total time: {0:d}.'.format(int(time.time() - start))
      pprint(result)

def testAll(experiments):
  experimentsDir = os.path.join(
      os.path.split(os.path.dirname(__file__))[:-1])[0]
  for experiment in experiments:
    experimentBase = os.path.join(os.getcwd(), experimentsDir, experiment)
    config, control = opfhelpers.loadExperiment(experimentBase)
    if control["environment"] == "opfExperiment":
      experimentTasks = control["tasks"]
      task = experimentTasks[0]
      datasetURI = task["dataset"]["streams"][0]["source"]
    elif control["environment"] == "grok":
      datasetURI = control["dataset"]["streams"][0]["source"]
    metricSpecs = control["metrics"]
    datasetPath = datasetURI[len("file://"):]
    for i in xrange(1024, 2176, 128):
      # config['modelParams']['tpParams']['cellsPerColumn'] = 16
      config["modelParams"]["tpParams"]["columnCount"] = i
      config["modelParams"]["spParams"]["columnCount"] = i
      print "Running with 32 cells per column and %i columns." % i
      start = time.time()
      result = runOneExperiment(config, control["inferenceArgs"],
                                metricSpecs, datasetPath)
      print "Total time: %d." % (time.time() - start)
      pprint(result)

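# The three testAll variants above differ mainly in where they read the
# dataset URI out of the experiment's control dict. A minimal sketch of a
# helper that factors out that lookup is shown below; it is hypothetical
# (not part of the original module) and simply mirrors the branches above.
def _getDatasetPath(control):
  if control['environment'] == 'opfExperiment':
    # Task-based experiments nest the stream source under the first task.
    datasetURI = control['tasks'][0]['dataset']['streams'][0]['source']
  else:
    # Simple control files ('nupic'/'grok') keep the dataset at the top level.
    datasetURI = control['dataset']['streams'][0]['source']
  # Strip the "file://" scheme to get a plain filesystem path.
  return datasetURI[len("file://"):]
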
def getModelDescriptionLists(numProcesses, experiment):
  config, control = opfhelpers.loadExperiment(experiment)
  # Parameter values to sweep. getFieldPermutations and predictedField are
  # assumed to be defined elsewhere in this module.
  encodersList = getFieldPermutations(config, 'pounds')
  ns = range(50, 140, 120)
  clAlphas = np.arange(0.01, 0.16, 0.104)
  synPermInactives = np.arange(0.01, 0.16, 0.105)
  tpPamLengths = range(5, 8, 2)
  tpSegmentActivations = range(13, 17, 12)
  # The dataset URI lives in a different place depending on whether the
  # experiment uses the task-based control format or the simple one.
  if control['environment'] == 'opfExperiment':
    experimentTasks = control['tasks']
    task = experimentTasks[0]
    datasetURI = task['dataset']['streams'][0]['source']
  elif control['environment'] == 'nupic':
    datasetURI = control['dataset']['streams'][0]['source']
  metricSpecs = control['metrics']
  datasetPath = datasetURI[len("file://"):]
  ModelSetUpData = []
  name = 0
  # Build one model description per combination of swept parameters.
  for n in ns:
    for clAlpha in clAlphas:
      for synPermInactive in synPermInactives:
        for tpPamLength in tpPamLengths:
          for tpSegmentActivation in tpSegmentActivations:
            for encoders in encodersList:
              encodersmod = copy.deepcopy(encoders)
              configmod = copy.deepcopy(config)
              configmod['modelParams']['sensorParams'][
                  'encoders'] = encodersmod
              configmod['modelParams']['clParams']['alpha'] = clAlpha
              configmod['modelParams']['spParams'][
                  'synPermInactiveDec'] = synPermInactive
              configmod['modelParams']['tpParams']['pamLength'] = tpPamLength
              configmod['modelParams']['tpParams'][
                  'activationThreshold'] = tpSegmentActivation
              # Set the predicted field's encoder output width to n bits.
              for encoder in encodersmod:
                if encoder['name'] == predictedField:
                  encoder['n'] = n
              ModelSetUpData.append((name, {
                  'modelConfig': configmod,
                  'inferenceArgs': control['inferenceArgs'],
                  'metricSpecs': metricSpecs,
                  'sourceSpec': datasetPath,
                  'sinkSpec': None,
              }))
              name += 1
  #print modelInfo['modelConfig']['modelParams']['tpParams']
  #print modelInfo['modelConfig']['modelParams']['sensorParams']['encoders'][4]['n']
  print "num Models: " + str(len(ModelSetUpData))
  # Shuffle the model descriptions, then split the work across processes
  # (see the chunk sketch below).
  shuffle(ModelSetUpData)
  #print [(m[1]['modelConfig']['modelParams']['tpParams']['pamLength'], m[1]['modelConfig']['modelParams']['sensorParams']['encoders']) for m in ModelSetUpData]
  return list(chunk(ModelSetUpData, numProcesses))

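# chunk is used by getModelDescriptionLists above but is not defined in this
# listing. The sketch below is one plausible implementation, assuming the
# intent is to split the shuffled model descriptions into numProcesses
# roughly equal groups (one per worker process); the real helper may differ,
# for example by yielding fixed-size groups instead.
def chunk(items, numGroups):
  # Deal the items round-robin into numGroups buckets to keep them balanced.
  groups = [[] for _ in xrange(numGroups)]
  for i, item in enumerate(items):
    groups[i % numGroups].append(item)
  return groups
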
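# A minimal sketch of how the per-process lists might be consumed, assuming
# the same runOneExperiment helper used by testAll above. Both _runModels
# and runAllModels are hypothetical drivers, not part of the original
# module, and sinkSpec is ignored here.
import multiprocessing


def _runModels(modelSetUpDataList):
  # Run every model description in this worker's list and collect results.
  results = []
  for name, info in modelSetUpDataList:
    result = runOneExperiment(info['modelConfig'], info['inferenceArgs'],
                              info['metricSpecs'], info['sourceSpec'])
    results.append((name, result))
  return results


def runAllModels(numProcesses, experiment):
  # Hand one list of model descriptions to each worker process.
  modelLists = getModelDescriptionLists(numProcesses, experiment)
  pool = multiprocessing.Pool(processes=numProcesses)
  try:
    return pool.map(_runModels, modelLists)
  finally:
    pool.close()
    pool.join()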