def train(self, training_file, num_records):
  """Build a network from a CSV data source and train it.

  Creates a FileRecordStream over training_file (with auto-rewind so the
  data repeats if num_records exceeds the file length), configures the
  network from self.network_config, feeds it num_records records, and
  saves the trained network to self.trained_network_path.

  :param training_file: path to the CSV training data
  :param num_records: number of records to run through the network
  """
  source = FileRecordStream(streamID=training_file)
  source.setAutoRewind(True)
  self._network = configureNetwork(source, self.network_config)
  # One record per call; equivalent to network.run(num_records).
  for _ in xrange(num_records):
    self._network.run(1)
  self._network.save(self.trained_network_path)
def run():
  """ Run classification network(s) on artificial sensor data

  Loads a network configuration template from JSON, expands it into sample
  configurations, then for every combination of config and signal parameters
  generates artificial sensor data, builds a network over it, and trains it.
  """
  with open("network_config_template.json", "rb") as jsonFile:
    templateNetworkConfig = json.load(jsonFile)
    networkConfigurations = generateSampleNetworkConfig(
      templateNetworkConfig, NUM_CATEGORIES)

  # Sweep every combination of network config and signal parameters.
  for networkConfig in networkConfigurations:
    for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
      for signalMean in SIGNAL_MEANS:
        for signalAmplitude in SIGNAL_AMPLITUDES:
          for signalPeriod in SIGNAL_PERIODS:
            # Region types and enable flags for this configuration.
            sensorType = networkConfig["sensorRegionConfig"].get(
              "regionType")
            spEnabled = networkConfig["sensorRegionConfig"].get(
              "regionEnabled")
            tmEnabled = networkConfig["tmRegionConfig"].get(
              "regionEnabled")
            upEnabled = networkConfig["tpRegionConfig"].get(
              "regionEnabled")
            classifierType = networkConfig[
              "classifierRegionConfig"].get("regionType")

            # Human-readable summary of this experiment's parameters.
            expParams = (
              "RUNNING EXPERIMENT WITH PARAMS:\n"
              " * numRecords=%s\n"
              " * signalAmplitude=%s\n"
              " * signalMean=%s\n"
              " * signalPeriod=%s\n"
              " * noiseAmplitude=%s\n"
              " * sensorType=%s\n"
              " * spEnabled=%s\n"
              " * tmEnabled=%s\n"
              " * tpEnabled=%s\n"
              " * classifierType=%s\n") % (
                NUM_RECORDS, signalAmplitude, signalMean, signalPeriod,
                noiseAmplitude, sensorType.split(".")[1], spEnabled,
                tmEnabled, upEnabled, classifierType.split(".")[1])
            print expParams

            # Generate artificial sensor data for this parameter set.
            inputFile = generateSensorData(
              DATA_DIR, OUTFILE_NAME, signalMean, signalPeriod,
              SEQUENCE_LENGTH, NUM_RECORDS, signalAmplitude,
              NUM_CATEGORIES, noiseAmplitude)

            dataSource = FileRecordStream(streamID=inputFile)
            network = configureNetwork(dataSource, networkConfig)
            partitions = generateNetworkPartitions(
              networkConfig, NUM_RECORDS)

            trainNetwork(network, networkConfig, partitions, NUM_RECORDS)
def initModel(self):
  """Create and return the classification network.

  No record stream is attached (the sensor is fed manually). The CioEncoder
  built here specifies the LanguageSensor output width.
  """
  cioEncoder = CioEncoder(retinaScaling=self.retinaScaling,
                          retina=self.retina,
                          apiKey=self.apiKey,
                          maxSparsity=self.maxSparsity,
                          verbosity=self.verbosity - 1)
  return configureNetwork(None, self.networkConfig, cioEncoder)
def initModel(self):
  """Create and return the classification network.

  self.networkDataPath must already be set; a FileRecordStream over it
  feeds the sensor. The CioEncoder built here specifies the LanguageSensor
  output width; Cortical.io responses are cached next to this script.
  """
  stream = FileRecordStream(streamID=self.networkDataPath)
  scriptDir = os.path.dirname(os.path.realpath(__file__))
  languageEncoder = CioEncoder(retinaScaling=self.retinaScaling,
                               cacheDir=os.path.join(scriptDir, "CioCache"),
                               retina=self.retina,
                               apiKey=self.apiKey)
  return configureNetwork(stream, self.networkConfig, languageEncoder)
def _initModel(self, cacheRoot):
  """Create and return the classification network.

  No record stream is attached. The CioEncoder built here specifies the
  LanguageSensor output width.

  :param cacheRoot: directory for caching Cortical.io API responses
  """
  cioEncoder = CioEncoder(retinaScaling=self.retinaScaling,
                          retina=self.retina,
                          apiKey=self.apiKey,
                          maxSparsity=self.maxSparsity,
                          verbosity=self.verbosity - 1,
                          cacheDir=cacheRoot)
  return configureNetwork(None, self.networkConfig, cioEncoder)
def _initModel(self, k):
  """ Initialize the network

  Builds a document-fingerprint Cio encoder, patches the classifier
  parameters (k and maxCategoryCount) into the config, and stores the
  resulting network on self.network.

  :param k: number of nearest neighbors for the classifier region
  """
  encoder = CioEncoder(retinaScaling=self.retinaScaling,
                       retina=self.retina,
                       fingerprintType=EncoderTypes.document,
                       apiKey=self.apiKey,
                       verbosity=self.verbosity - 1)
  # NOTE(review): modelConfig is not defined in this method -- presumably a
  # module-level template dict; mutating it here persists across calls and
  # instances. Confirm this sharing is intended.
  modelConfig["classifierRegionConfig"]["regionParams"]["k"] = k
  modelConfig["classifierRegionConfig"]["regionParams"][
    "maxCategoryCount"] = self.numLabels
  self.networkConfig = modelConfig
  self.network = configureNetwork(None, self.networkConfig, encoder)
def _initModel(self, k):
  """Build the classification network with a document-level Cio encoder.

  Patches the classifier region parameters (k, maxCategoryCount) into the
  module-level modelConfig, stores it on self.networkConfig, and builds
  the network onto self.network.

  :param k: number of nearest neighbors for the classifier region
  """
  docEncoder = CioEncoder(retinaScaling=self.retinaScaling,
                          retina=self.retina,
                          fingerprintType=EncoderTypes.document,
                          apiKey=self.apiKey,
                          verbosity=self.verbosity - 1)
  # Alias the nested params dict once instead of repeating the key chain.
  classifierParams = modelConfig["classifierRegionConfig"]["regionParams"]
  classifierParams["k"] = k
  classifierParams["maxCategoryCount"] = self.numLabels
  self.networkConfig = modelConfig
  self.network = configureNetwork(None, self.networkConfig, docEncoder)
def _initModel(self, k):
  """Build the classification network with a document-level Cio encoder.

  Cortical.io responses are cached next to this script. The classifier
  region parameters (k, maxCategoryCount) are patched into the
  module-level modelConfig before the network is built onto self.network.

  :param k: number of nearest neighbors for the classifier region
  """
  scriptDir = os.path.dirname(os.path.realpath(__file__))
  docEncoder = CioEncoder(retinaScaling=self.retinaScaling,
                          cacheDir=os.path.join(scriptDir, "CioCache"),
                          retina=self.retina,
                          fingerprintType=EncoderTypes.document,
                          apiKey=self.apiKey)
  classifierParams = modelConfig["classifierRegionConfig"]["regionParams"]
  classifierParams["k"] = k
  classifierParams["maxCategoryCount"] = self.numLabels
  self.networkConfig = modelConfig
  self.network = configureNetwork(None, self.networkConfig, docEncoder)
def _initModel(self, k):
  """ Initialize the network

  Builds a document-fingerprint Cio encoder (caching API responses next to
  this script), patches the classifier parameters into the config, and
  stores the resulting network on self.network.

  :param k: number of nearest neighbors for the classifier region
  """
  # Cache Cortical.io API responses alongside this script.
  root = os.path.dirname(os.path.realpath(__file__))
  encoder = CioEncoder(
    retinaScaling=self.retinaScaling,
    cacheDir=os.path.join(root, "CioCache"),
    retina=self.retina,
    fingerprintType=EncoderTypes.document,
    apiKey=self.apiKey,
  )
  # NOTE(review): modelConfig is not defined in this method -- presumably a
  # module-level template dict; this mutation persists across calls.
  modelConfig["classifierRegionConfig"]["regionParams"]["k"] = k
  modelConfig["classifierRegionConfig"]["regionParams"]["maxCategoryCount"] = self.numLabels
  self.networkConfig = modelConfig
  self.network = configureNetwork(None, self.networkConfig, encoder)
def initModel(self):
  """Create and return the classification network.

  When self.networkDataPath is set, a FileRecordStream over it feeds the
  sensor; otherwise the network is built without a data source. The
  CioEncoder built here specifies the LanguageSensor output width.
  """
  recordStream = (FileRecordStream(streamID=self.networkDataPath)
                  if self.networkDataPath is not None else None)
  scriptDir = os.path.dirname(os.path.realpath(__file__))
  languageEncoder = CioEncoder(retinaScaling=self.retinaScaling,
                               cacheDir=os.path.join(scriptDir, "CioCache"),
                               retina=self.retina,
                               apiKey=self.apiKey)
  return configureNetwork(recordStream, self.networkConfig, languageEncoder)
def initModel(self):
  """Create and return the classification network.

  When self.networkDataPath is set, a FileRecordStream over it feeds the
  sensor; otherwise the network is built without a data source. The
  CioEncoder built here specifies the LanguageSensor output width;
  Cortical.io responses are cached under self.cacheRoot.
  """
  if self.networkDataPath is None:
    recordStream = None
  else:
    recordStream = FileRecordStream(streamID=self.networkDataPath)
  languageEncoder = CioEncoder(
    retinaScaling=self.retinaScaling,
    cacheDir=os.path.join(self.cacheRoot, "CioCache"),
    retina=self.retina,
    apiKey=self.apiKey,
    verbosity=self.verbosity - 1,
  )
  return configureNetwork(recordStream, self.networkConfig, languageEncoder)
def initialize(self):
  """Build the classification network from the training-set CSV.

  Creates a FileRecordStream over self.training_set and configures the
  network from self.network_config onto self.network.
  """
  source = FileRecordStream(streamID=self.training_set)
  self.network = configureNetwork(source, self.network_config)
def testClassificationAccuracy(self):
  """Test classification accuracy for sensor data.

  Sweeps every sampled network configuration against every combination of
  signal parameters, trains a network on generated data, and asserts the
  expected accuracy for the known parameter combinations.
  """
  networkConfigurations = generateSampleNetworkConfig(
    self.templateNetworkConfig, NUM_CATEGORIES)

  for networkConfig in networkConfigurations:
    for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
      for signalMean in SIGNAL_MEANS:
        for signalAmplitude in SIGNAL_AMPLITUDES:
          for signalPeriod in SIGNAL_PERIODS:
            # Region types and enable flags for this configuration.
            sensorType = networkConfig[
              "sensorRegionConfig"].get("regionType")
            spEnabled = networkConfig[
              "sensorRegionConfig"].get("regionEnabled")
            tmEnabled = networkConfig["tmRegionConfig"].get(
              "regionEnabled")
            upEnabled = networkConfig["tpRegionConfig"].get(
              "regionEnabled")
            classifierType = networkConfig[
              "classifierRegionConfig"].get("regionType")

            # Human-readable summary of this experiment's parameters.
            expParams = ("RUNNING EXPERIMENT WITH PARAMS:\n"
                         " * numRecords=%s\n"
                         " * signalAmplitude=%s\n"
                         " * signalMean=%s\n"
                         " * signalPeriod=%s\n"
                         " * noiseAmplitude=%s\n"
                         " * sensorType=%s\n"
                         " * spEnabled=%s\n"
                         " * tmEnabled=%s\n"
                         " * tpEnabled=%s\n"
                         " * classifierType=%s\n") % (
                           NUM_RECORDS, signalAmplitude, signalMean,
                           signalPeriod, noiseAmplitude,
                           sensorType.split(".")[1], spEnabled,
                           tmEnabled, upEnabled,
                           classifierType.split(".")[1])
            print expParams

            # Generate artificial sensor data for this parameter set.
            inputFile = generateSensorData(
              DATA_DIR, OUTFILE_NAME, signalMean, signalPeriod,
              SEQUENCE_LENGTH, NUM_RECORDS, signalAmplitude,
              NUM_CATEGORIES, noiseAmplitude)

            dataSource = FileRecordStream(streamID=inputFile)
            network = configureNetwork(dataSource, networkConfig)
            partitions = generateNetworkPartitions(
              networkConfig, NUM_RECORDS)

            classificationAccuracy = trainNetwork(
              network, networkConfig, partitions, NUM_RECORDS)

            # Noise-free baseline cases are expected to classify perfectly.
            if (noiseAmplitude == 0
                and signalMean == 1.0
                and signalAmplitude == 1.0
                and signalPeriod == 20.0
                and classifierType == KNN_CLASSIFIER_TYPE
                and spEnabled
                and tmEnabled
                and not upEnabled):
              self.assertEqual(classificationAccuracy, 100.00)
            elif (noiseAmplitude == 0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and tmEnabled
                  and not upEnabled):
              self.assertEqual(classificationAccuracy, 100.00)
            elif (noiseAmplitude == 0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and not tmEnabled
                  and not upEnabled):
              self.assertEqual(classificationAccuracy, 100.00)
            # Noisy cases tolerate a small accuracy band.
            elif (noiseAmplitude == 1.0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and tmEnabled
                  and not upEnabled):
              # using AlmostEqual until the random bug issue is fixed
              self.assertAlmostEqual(classificationAccuracy, 80, delta=5)
            elif (noiseAmplitude == 1.0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and not tmEnabled
                  and not upEnabled):
              # using AlmostEqual until the random bug issue is fixed
              self.assertAlmostEqual(classificationAccuracy, 81, delta=5)
def run():
  """ Run classification network(s) on artificial sensor data

  Loads a network configuration template from JSON, expands it into sample
  configurations, then for every combination of config and signal parameters
  generates artificial sensor data, builds a network over it, and trains it.
  """
  with open("network_config_template.json", "rb") as jsonFile:
    templateNetworkConfig = json.load(jsonFile)
    networkConfigurations = generateSampleNetworkConfig(templateNetworkConfig,
                                                        NUM_CATEGORIES)

  # Sweep every combination of network config and signal parameters.
  for networkConfig in networkConfigurations:
    for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
      for signalMean in SIGNAL_MEANS:
        for signalAmplitude in SIGNAL_AMPLITUDES:
          for signalPeriod in SIGNAL_PERIODS:
            # Region types and enable flags for this configuration.
            sensorType = networkConfig["sensorRegionConfig"].get(
              "regionType")
            spEnabled = networkConfig["sensorRegionConfig"].get(
              "regionEnabled")
            tmEnabled = networkConfig["tmRegionConfig"].get(
              "regionEnabled")
            upEnabled = networkConfig["tpRegionConfig"].get(
              "regionEnabled")
            classifierType = networkConfig["classifierRegionConfig"].get(
              "regionType")

            # Human-readable summary of this experiment's parameters.
            expParams = ("RUNNING EXPERIMENT WITH PARAMS:\n"
                         " * numRecords=%s\n"
                         " * signalAmplitude=%s\n"
                         " * signalMean=%s\n"
                         " * signalPeriod=%s\n"
                         " * noiseAmplitude=%s\n"
                         " * sensorType=%s\n"
                         " * spEnabled=%s\n"
                         " * tmEnabled=%s\n"
                         " * tpEnabled=%s\n"
                         " * classifierType=%s\n"
                         ) % (NUM_RECORDS,
                              signalAmplitude,
                              signalMean,
                              signalPeriod,
                              noiseAmplitude,
                              sensorType.split(".")[1],
                              spEnabled,
                              tmEnabled,
                              upEnabled,
                              classifierType.split(".")[1])
            print expParams

            # Generate artificial sensor data for this parameter set.
            inputFile = generateSensorData(DATA_DIR,
                                           OUTFILE_NAME,
                                           signalMean,
                                           signalPeriod,
                                           SEQUENCE_LENGTH,
                                           NUM_RECORDS,
                                           signalAmplitude,
                                           NUM_CATEGORIES,
                                           noiseAmplitude)

            dataSource = FileRecordStream(streamID=inputFile)
            network = configureNetwork(dataSource, networkConfig)
            partitions = generateNetworkPartitions(networkConfig,
                                                   NUM_RECORDS)

            trainNetwork(network, networkConfig, partitions, NUM_RECORDS)
def testClassificationAccuracy(self):
  """Test classification accuracy for sensor data.

  Sweeps every sampled network configuration against every combination of
  signal parameters, trains a network on generated data, and asserts the
  expected accuracy for the known parameter combinations.
  """
  networkConfigurations = generateSampleNetworkConfig(
    self.templateNetworkConfig, NUM_CATEGORIES)

  for networkConfig in networkConfigurations:
    for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
      for signalMean in SIGNAL_MEANS:
        for signalAmplitude in SIGNAL_AMPLITUDES:
          for signalPeriod in SIGNAL_PERIODS:
            # Region types and enable flags for this configuration.
            sensorType = networkConfig["sensorRegionConfig"].get(
              "regionType")
            spEnabled = networkConfig["sensorRegionConfig"].get(
              "regionEnabled")
            tmEnabled = networkConfig["tmRegionConfig"].get(
              "regionEnabled")
            upEnabled = networkConfig["tpRegionConfig"].get(
              "regionEnabled")
            classifierType = networkConfig["classifierRegionConfig"].get(
              "regionType")

            # Human-readable summary of this experiment's parameters.
            expParams = ("RUNNING EXPERIMENT WITH PARAMS:\n"
                         " * numRecords=%s\n"
                         " * signalAmplitude=%s\n"
                         " * signalMean=%s\n"
                         " * signalPeriod=%s\n"
                         " * noiseAmplitude=%s\n"
                         " * sensorType=%s\n"
                         " * spEnabled=%s\n"
                         " * tmEnabled=%s\n"
                         " * tpEnabled=%s\n"
                         " * classifierType=%s\n"
                         ) % (NUM_RECORDS,
                              signalAmplitude,
                              signalMean,
                              signalPeriod,
                              noiseAmplitude,
                              sensorType.split(".")[1],
                              spEnabled,
                              tmEnabled,
                              upEnabled,
                              classifierType.split(".")[1])
            print expParams

            # Generate artificial sensor data for this parameter set.
            inputFile = generateSensorData(DATA_DIR,
                                           OUTFILE_NAME,
                                           signalMean,
                                           signalPeriod,
                                           SEQUENCE_LENGTH,
                                           NUM_RECORDS,
                                           signalAmplitude,
                                           NUM_CATEGORIES,
                                           noiseAmplitude)

            dataSource = FileRecordStream(streamID=inputFile)
            network = configureNetwork(dataSource, networkConfig)
            partitions = generateNetworkPartitions(networkConfig,
                                                   NUM_RECORDS)

            classificationAccuracy = trainNetwork(network,
                                                  networkConfig,
                                                  partitions,
                                                  NUM_RECORDS)

            # Noise-free baseline cases are expected to classify perfectly.
            if (noiseAmplitude == 0
                and signalMean == 1.0
                and signalAmplitude == 1.0
                and signalPeriod == 20.0
                and classifierType == KNN_CLASSIFIER_TYPE
                and spEnabled
                and tmEnabled
                and not upEnabled):
              self.assertEqual(classificationAccuracy, 100.00)
            elif (noiseAmplitude == 0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and tmEnabled
                  and not upEnabled):
              self.assertEqual(classificationAccuracy, 100.00)
            elif (noiseAmplitude == 0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and not tmEnabled
                  and not upEnabled):
              self.assertEqual(classificationAccuracy, 100.00)
            # Noisy cases tolerate a small accuracy band.
            elif (noiseAmplitude == 1.0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and tmEnabled
                  and not upEnabled):
              # using AlmostEqual until the random bug issue is fixed
              self.assertAlmostEqual(classificationAccuracy, 80, delta=5)
            elif (noiseAmplitude == 1.0
                  and signalMean == 1.0
                  and signalAmplitude == 1.0
                  and signalPeriod == 20.0
                  and classifierType == CLA_CLASSIFIER_TYPE
                  and spEnabled
                  and not tmEnabled
                  and not upEnabled):
              # using AlmostEqual until the random bug issue is fixed
              self.assertAlmostEqual(classificationAccuracy, 81, delta=5)
def run():
  """ Run classification network(s) on artificial sensor data

  Loads network configurations (either fully specified JSON or an expanded
  template), sweeps every combination of signal parameters, trains a
  network per combination, collects accuracy results, and hands them to
  print_and_save_results.
  """
  # Either load fully-specified configs or expand a template.
  if USE_JSON_CONFIG:
    with open('config/network_configs.json', 'rb') as fr:
      networkConfigurations = json.load(fr)
  else:
    with open("config/network_config_template.json", "rb") as jsonFile:
      templateNetworkConfig = json.load(jsonFile)
      networkConfigurations = generateSampleNetworkConfig(templateNetworkConfig,
                                                          NUM_CATEGORIES)

  expSetups = []              # one entry per generated data set
  classificationResults = []  # one entry per trained network

  for signalType in SIGNAL_TYPES:
    for networkConfig in networkConfigurations:
      for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
        for signalMean in SIGNAL_MEANS:
          for signalAmplitude in SIGNAL_AMPLITUDES:
            for numCategories in NUM_CATEGORIES:
              for numReps in NUM_REPS:
                for numPhases in NUM_PHASES:
                  # Enable flags / classifier type for this configuration.
                  spEnabled = networkConfig["sensorRegionConfig"].get(
                    "regionEnabled")
                  tmEnabled = networkConfig["tmRegionConfig"].get(
                    "regionEnabled")
                  upEnabled = networkConfig["tpRegionConfig"].get(
                    "regionEnabled")
                  classifierType = networkConfig["classifierRegionConfig"].get(
                    "regionType")

                  # Generate data; expSetup describes the generated file
                  # (at least 'filePath' and 'numPoints' are read below).
                  expSetup = generateSensorData(signalType,
                                                DATA_DIR,
                                                numPhases,
                                                numReps,
                                                signalMean,
                                                signalAmplitude,
                                                numCategories,
                                                noiseAmplitude)
                  expSetup['expId'] = len(expSetups)
                  expSetups.append(expSetup)

                  dataSource = FileRecordStream(streamID=expSetup['filePath'])
                  network = configureNetwork(dataSource, networkConfig)
                  partitions = generateNetworkPartitions(networkConfig,
                                                         expSetup['numPoints'])

                  classificationAccuracy = trainNetwork(network,
                                                        networkConfig,
                                                        partitions,
                                                        expSetup['numPoints'],
                                                        VERBOSITY)
                  classificationResults.append(
                    {
                      'spEnabled': spEnabled,
                      'tmEnabled': tmEnabled,
                      'upEnabled': upEnabled,
                      'classifierType': classifierType.split(".")[1],
                      'classificationAccuracy': classificationAccuracy
                    })

  print_and_save_results(classificationResults, expSetups)
def run():
  """ Run classification network(s) on artificial sensor data

  Loads network configurations (either fully specified JSON or an expanded
  template), sweeps every combination of signal parameters, trains a
  network per combination, and writes each result row both to RESULTS_FILE
  (CSV) and to a console PrettyTable.
  """
  # Either load fully-specified configs or expand a template.
  if USE_JSON_CONFIG:
    with open('config/network_configs.json', 'rb') as fr:
      networkConfigurations = json.load(fr)
  else:
    with open("config/network_config_template.json", "rb") as jsonFile:
      templateNetworkConfig = json.load(jsonFile)
      networkConfigurations = generateSampleNetworkConfig(
        templateNetworkConfig, NUM_CATEGORIES)

  # Column headers shared by the CSV results file and the console table.
  headers = [
    'numRecords', 'seqLength', 'numClasses', 'signalAmplitude',
    'signalMean', 'signalPeriod', 'noiseAmplitude', 'spEnabled',
    'tmEnabled', 'tpEnabled', 'classifierType', 'classificationAccuracy'
  ]

  with open(RESULTS_FILE, 'wb') as fw:
    writer = csv.writer(fw)
    writer.writerow(headers)
    t = PrettyTable(headers)

    for networkConfig in networkConfigurations:
      for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
        for signalMean in SIGNAL_MEANS:
          for signalAmplitude in SIGNAL_AMPLITUDES:
            for signalPeriod in SIGNAL_PERIODS:
              # Enable flags / classifier type for this configuration.
              spEnabled = networkConfig[
                "sensorRegionConfig"].get("regionEnabled")
              tmEnabled = networkConfig["tmRegionConfig"].get(
                "regionEnabled")
              upEnabled = networkConfig["tpRegionConfig"].get(
                "regionEnabled")
              classifierType = networkConfig[
                "classifierRegionConfig"].get("regionType")

              # Generate artificial sensor data for this parameter set.
              inputFile = generateSensorData(
                DATA_DIR, OUTFILE_NAME, signalMean, signalPeriod,
                SEQUENCE_LENGTH, NUM_RECORDS, signalAmplitude,
                NUM_CATEGORIES, noiseAmplitude)

              dataSource = FileRecordStream(streamID=inputFile)
              network = configureNetwork(dataSource, networkConfig)
              partitions = generateNetworkPartitions(
                networkConfig, NUM_RECORDS)

              classificationAccuracy = trainNetwork(
                network, networkConfig, partitions, NUM_RECORDS,
                VERBOSITY)

              # One result row per experiment, in header order.
              results = [
                NUM_RECORDS, SEQUENCE_LENGTH, NUM_CATEGORIES,
                signalAmplitude, signalMean, signalPeriod,
                noiseAmplitude, spEnabled, tmEnabled, upEnabled,
                classifierType.split(".")[1], classificationAccuracy
              ]
              writer.writerow(results)
              t.add_row(results)

  print '%s\n' % t
  print '==> Results saved to %s\n' % RESULTS_FILE
def run():
  """ Run classification network(s) on artificial sensor data

  Loads network configurations (either fully specified JSON or an expanded
  template), sweeps every combination of signal parameters, trains a
  network per combination, and writes each result row both to RESULTS_FILE
  (CSV) and to a console PrettyTable.
  """
  # Either load fully-specified configs or expand a template.
  if USE_JSON_CONFIG:
    with open('config/network_configs.json', 'rb') as fr:
      networkConfigurations = json.load(fr)
  else:
    with open("config/network_config_template.json", "rb") as jsonFile:
      templateNetworkConfig = json.load(jsonFile)
      networkConfigurations = generateSampleNetworkConfig(templateNetworkConfig,
                                                          NUM_CATEGORIES)

  # Column headers shared by the CSV results file and the console table.
  headers = ['numRecords', 'seqLength', 'numClasses', 'signalAmplitude',
             'signalMean', 'signalPeriod', 'noiseAmplitude', 'spEnabled',
             'tmEnabled', 'tpEnabled', 'classifierType',
             'classificationAccuracy']

  with open(RESULTS_FILE, 'wb') as fw:
    writer = csv.writer(fw)
    writer.writerow(headers)
    t = PrettyTable(headers)

    for networkConfig in networkConfigurations:
      for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
        for signalMean in SIGNAL_MEANS:
          for signalAmplitude in SIGNAL_AMPLITUDES:
            for signalPeriod in SIGNAL_PERIODS:
              # Enable flags / classifier type for this configuration.
              spEnabled = networkConfig["sensorRegionConfig"].get(
                "regionEnabled")
              tmEnabled = networkConfig["tmRegionConfig"].get(
                "regionEnabled")
              upEnabled = networkConfig["tpRegionConfig"].get(
                "regionEnabled")
              classifierType = networkConfig["classifierRegionConfig"].get(
                "regionType")

              # Generate artificial sensor data for this parameter set.
              inputFile = generateSensorData(DATA_DIR,
                                             OUTFILE_NAME,
                                             signalMean,
                                             signalPeriod,
                                             SEQUENCE_LENGTH,
                                             NUM_RECORDS,
                                             signalAmplitude,
                                             NUM_CATEGORIES,
                                             noiseAmplitude)

              dataSource = FileRecordStream(streamID=inputFile)
              network = configureNetwork(dataSource, networkConfig)
              partitions = generateNetworkPartitions(networkConfig,
                                                     NUM_RECORDS)

              classificationAccuracy = trainNetwork(network,
                                                    networkConfig,
                                                    partitions,
                                                    NUM_RECORDS,
                                                    VERBOSITY)

              # One result row per experiment, in header order.
              results = [NUM_RECORDS, SEQUENCE_LENGTH, NUM_CATEGORIES,
                         signalAmplitude, signalMean, signalPeriod,
                         noiseAmplitude, spEnabled, tmEnabled, upEnabled,
                         classifierType.split(".")[1],
                         classificationAccuracy]
              writer.writerow(results)
              t.add_row(results)

  print '%s\n' % t
  print '==> Results saved to %s\n' % RESULTS_FILE
_CONFIG = simplejson.load(open(_CONFIG_JSON, "rb")) _REGION_CONFIG_KEYS = ("spRegionConfig", "tmRegionConfig", "tpRegionConfig", "classifierRegionConfig") _REGION_NAMES = [] for region in _REGION_CONFIG_KEYS: if _CONFIG[region].get("regionEnabled"): _REGION_NAMES.append(_CONFIG[region]["regionName"]) if __name__ == "__main__": dataSource = FileRecordStream(streamID="backup/training-data-attention.csv") network = configureNetwork(dataSource, _CONFIG) setNetworkLearningMode(network, _REGION_NAMES, True) sensorRegion = network.regions[ _CONFIG["sensorRegionConfig"].get("regionName")] classifierRegion = network.regions[ _CONFIG["classifierRegionConfig"].get("regionName")] headers = ["x", "y", "label"] num_correct = 0 for category in range(_NUM_CATEGORIES): csvFile = open(_INPUT_FILES[category], "rb") reader = csv.reader(csvFile) # skip 3 header rows reader.next()