def testNetwork(testPath="test_images/testing", savedNetworkFile="imageNet_net.nta"):
    net = Network(savedNetworkFile)
    sensor = net.regions["sensor"]
    sp = net.regions["SP"]
    classifier = net.regions["classifier"]

    print "Reading test images"
    sensor.executeCommand(["loadMultipleImages", testPath])
    numTestImages = sensor.getParameter("numImages")
    print "Number of test images", numTestImages

    start = time.time()

    # Various region parameters
    sensor.setParameter("explorer", "ExhaustiveSweep")
    classifier.setParameter("inferenceMode", 1)
    classifier.setParameter("learningMode", 0)
    sp.setParameter("inferenceMode", 1)
    sp.setParameter("learningMode", 0)

    numCorrect = 0
    for i in range(numTestImages):
        net.run(1)
        inferredCategory = classifier.getOutputData("categoriesOut").argmax()
        if sensor.getOutputData("categoryOut") == inferredCategory:
            numCorrect += 1
        if i % (numTestImages / 100) == 0:
            print "Iteration", i, "numCorrect=", numCorrect

    # Some interesting statistics
    print "Testing time:", time.time() - start
    print "Number of test images", numTestImages
    print "num correct=", numCorrect
    print "pct correct=", (100.0 * numCorrect) / numTestImages
Example No. 2
    def testGetSelf(self):
        # Create network
        net = Network()

        # Register ImageSensor region
        Network.registerRegion(ImageSensor)

        # Add sensor
        sensor = net.addRegion("sensor", "py.ImageSensor",
                               "{width: 100, height: 50}")
        pysensor = sensor.getSelf()

        # Verify set parameters
        self.assertTrue(isinstance(pysensor, ImageSensor))
        self.assertEqual(pysensor.height, 50)
        self.assertEqual(pysensor.width, 100)

        self.assertEqual(pysensor.width, sensor.getParameter('width'))
        self.assertEqual(pysensor.height, sensor.getParameter('height'))

        sensor.setParameter('width', 444)
        sensor.setParameter('height', 444)
        self.assertEqual(pysensor.width, 444)
        self.assertEqual(pysensor.height, 444)

        # Verify py object is not a copy
        sensor.getSelf().height = 100
        sensor.getSelf().width = 200
        self.assertEqual(pysensor.height, 100)
        self.assertEqual(pysensor.width, 200)

        pysensor.height = 50
        pysensor.width = 100
        self.assertEqual(sensor.getSelf().height, 50)
        self.assertEqual(sensor.getSelf().width, 100)
Example No. 3
def _createLPFNetwork(addSP = True, addTP = False):
  """Create an 'old-style' network ala LPF and return it."""

  # ==========================================================================
  # Create the encoder and data source stuff we need to configure the sensor
  sensorParams = dict(verbosity = _VERBOSITY)
  encoder = _createEncoder()
  trainFile = findDataset("extra/gym/gym.csv")
  dataSource = FileRecordStream(streamID=trainFile)
  dataSource.setAutoRewind(True)

  # Create all the stuff we need to configure the CLARegion
  g_claConfig['spEnable'] = addSP
  g_claConfig['tpEnable'] = addTP
  claParams = _getCLAParams(encoder = encoder, config= g_claConfig)
  claParams['spSeed'] = g_claConfig['spSeed']
  claParams['tpSeed'] = g_claConfig['tpSeed']

  # ==========================================================================
  # Now create the network itself
  n = Network()

  n.addRegion("sensor", "py.RecordSensor", json.dumps(sensorParams))

  sensor = n.regions['sensor'].getSelf()
  sensor.encoder = encoder
  sensor.dataSource = dataSource

  n.addRegion("level1", "py.CLARegion", json.dumps(claParams))

  n.link("sensor", "level1", "UniformLink", "")
  n.link("sensor", "level1", "UniformLink", "",
         srcOutput="resetOut", destInput="resetIn")

  return n
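A hedged sketch of exercising the returned LPF-style network, reusing the learningMode/inferenceMode toggling and the "bottomUpOut" output name that appear in the testSaveAndReload examples further down; the iteration counts are arbitrary.

net = _createLPFNetwork(addSP=True, addTP=False)
level1 = net.regions["level1"]

# Train the CLARegion for a while, then switch it to inference
level1.setParameter("learningMode", 1)
level1.setParameter("inferenceMode", 0)
net.run(100)

level1.setParameter("learningMode", 0)
level1.setParameter("inferenceMode", 1)
net.run(10)
bottomUpOut = level1.getOutputData("bottomUpOut")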
Example No. 4
    def testScalars(self):
        scalars = [("int32Param", 32, int, 35), ("uint32Param", 33, int, 36),
                   ("int64Param", 64, int, 74), ("uint64Param", 65, int, 75),
                   ("real32Param", 32.1, float, 33.1),
                   ("real64Param", 64.1, float, 65.1),
                   ("stringParam", "nodespec value", str, "new value")]

        n = Network()
        l1 = n.addRegion("l1", "TestNode", "")
        x = l1.getParameter("uint32Param")

        for paramName, initval, paramtype, newval in scalars:
            # Check the initial value for each parameter.
            x = l1.getParameter(paramName)
            self.assertEqual(type(x), paramtype)
            if initval is None:
                continue
            if type(x) == float:
                self.assertTrue(abs(x - initval) < 0.00001)
            else:
                self.assertEqual(x, initval)

            # Now set the value, and check to make sure the value is updated
            l1.setParameter(paramName, newval)
            x = l1.getParameter(paramName)
            self.assertEqual(type(x), paramtype)
            if type(x) == float:
                self.assertTrue(abs(x - newval) < 0.00001)
            else:
                self.assertEqual(x, newval)
Example No. 5
    def testRunPCANode(self):
        from nupic.engine import *

        rgen = numpy.random.RandomState(37)

        inputSize = 8

        net = Network()
        sensor = net.addRegion(
            'sensor', 'py.ImageSensor',
            '{ width: %d, height: %d }' % (inputSize, inputSize))

        params = """{bottomUpCount: %d,
              SVDSampleCount: 5,
              SVDDimCount: 2}""" % inputSize

        pca = net.addRegion('pca', 'py.PCANode', params)

        #nodeAbove = CreateNode("py.ImageSensor", phase=0, categoryOut=1, dataOut=3,
        #                       width=3, height=1)
        #net.addElement('nodeAbove', nodeAbove)

        linkParams = '{ mapping: in, rfSize: [%d, %d] }' % (inputSize,
                                                            inputSize)
        net.link('sensor', 'pca', 'UniformLink', linkParams, 'dataOut',
                 'bottomUpIn')

        net.initialize()

        for i in range(10):
            pca.getSelf()._testInputs = numpy.random.random([inputSize])
            net.run(1)
Example No. 6
    def testArrays(self):
        arrays = [
            ("real32ArrayParam",
             [0 * 32, 1 * 32, 2 * 32, 3 * 32, 4 * 32, 5 * 32, 6 * 32,
              7 * 32], "Real32"),
            ("int64ArrayParam", [0 * 64, 1 * 64, 2 * 64, 3 * 64], "Int64")
        ]

        n = Network()
        l1 = n.addRegion("l1", "TestNode", "")

        for paramName, initval, paramtype in arrays:
            x = l1.getParameter(paramName)
            self.assertTrue(isinstance(x,
                                       nupic.bindings.engine_internal.Array))
            self.assertEqual(x.getType(), paramtype)
            self.assertEqual(len(x), len(initval))
            for i in range(len(x)):
                self.assertEqual(x[i], initval[i])

            for i in range(len(x)):
                x[i] = x[i] * 2
            l1.setParameter(paramName, x)

            x = l1.getParameter(paramName)
            self.assertTrue(isinstance(x,
                                       nupic.bindings.engine_internal.Array))
            self.assertEqual(x.getType(), paramtype)
            self.assertEqual(len(x), len(initval))
            for i in range(len(x)):
                self.assertEqual(x[i], 2 * initval[i])
Example No. 7
  def testScalars(self):
    scalars = [
      ("int32Param", 32, int, 35),
      ("uint32Param", 33, int, 36),
      ("int64Param", 64, long, 74),
      ("uint64Param", 65, long, 75),
      ("real32Param", 32.1, float, 33.1),
      ("real64Param", 64.1, float, 65.1),
      ("stringParam", "nodespec value", str, "new value")]

    n = Network()
    l1= n.addRegion("l1", "TestNode", "")
    x = l1.getParameter("uint32Param")

    for paramName, initval, paramtype, newval in scalars:
      # Check the initial value for each parameter.
      x = l1.getParameter(paramName)
      self.assertEqual(type(x), paramtype)
      if initval is None:
        continue
      if type(x) == float:
        self.assertTrue(abs(x  - initval) < 0.00001)
      else:
        self.assertEqual(x, initval)

      # Now set the value, and check to make sure the value is updated
      l1.setParameter(paramName, newval)
      x = l1.getParameter(paramName)
      self.assertEqual(type(x), paramtype)
      if type(x) == float:
        self.assertTrue(abs(x  - newval) < 0.00001)
      else:
        self.assertEqual(x, newval)
def testNetwork(testPath="mnist/testing", savedNetworkFile="mnist_net.nta"):
    net = Network(savedNetworkFile)
    sensor = net.regions['sensor']
    sp = net.regions["SP"]
    classifier = net.regions['classifier']

    print "Reading test images"
    sensor.executeCommand(["loadMultipleImages", testPath])
    numTestImages = sensor.getParameter('numImages')
    print "Number of test images", numTestImages

    start = time.time()

    # Various region parameters
    sensor.setParameter('explorer', 'Flash')
    classifier.setParameter('inferenceMode', 1)
    classifier.setParameter('learningMode', 0)
    sp.setParameter('inferenceMode', 1)
    sp.setParameter('learningMode', 0)

    numCorrect = 0
    for i in range(numTestImages):
        net.run(1)
        inferredCategory = classifier.getOutputData('categoriesOut').argmax()
        if sensor.getOutputData('categoryOut') == inferredCategory:
            numCorrect += 1
        if i % (numTestImages / 100) == 0:
            print "Iteration", i, "numCorrect=", numCorrect

    # Some interesting statistics
    print "Testing time:", time.time() - start
    print "Number of test images", numTestImages
    print "num correct=", numCorrect
    print "pct correct=", (100.0 * numCorrect) / numTestImages
Example No. 9
    def testGetSelf(self):
        # Create network
        net = Network()

        # Register ImageSensor region
        Network.registerRegion(ImageSensor)

        # Add sensor
        sensor = net.addRegion("sensor", "py.ImageSensor", "{width: 100, height: 50}")
        pysensor = sensor.getSelf()

        # Verify set parameters
        self.assertTrue(isinstance(pysensor, ImageSensor))
        self.assertEqual(pysensor.height, 50)
        self.assertEqual(pysensor.width, 100)

        self.assertEqual(pysensor.width, sensor.getParameter("width"))
        self.assertEqual(pysensor.height, sensor.getParameter("height"))

        sensor.setParameter("width", 444)
        sensor.setParameter("height", 444)
        self.assertEqual(pysensor.width, 444)
        self.assertEqual(pysensor.height, 444)

        # Verify py object is not a copy
        sensor.getSelf().height = 100
        sensor.getSelf().width = 200
        self.assertEqual(pysensor.height, 100)
        self.assertEqual(pysensor.width, 200)

        pysensor.height = 50
        pysensor.width = 100
        self.assertEqual(sensor.getSelf().height, 50)
        self.assertEqual(sensor.getSelf().width, 100)
Example No. 10
def testNetwork(testPath="mnist/testing", savedNetworkFile="mnist_net.nta"):
  net = Network(savedNetworkFile)
  sensor = net.regions['sensor']
  sp = net.regions["SP"]
  classifier = net.regions['classifier']

  print "Reading test images"
  sensor.executeCommand(["loadMultipleImages",testPath])
  numTestImages = sensor.getParameter('numImages')
  print "Number of test images",numTestImages

  start = time.time()

  # Various region parameters
  sensor.setParameter('explorer','Flash')
  classifier.setParameter('inferenceMode', 1)
  classifier.setParameter('learningMode', 0)
  sp.setParameter('inferenceMode', 1)
  sp.setParameter('learningMode', 0)

  numCorrect = 0
  for i in range(numTestImages):
    net.run(1)
    inferredCategory = classifier.getOutputData('categoriesOut').argmax()
    if sensor.getOutputData('categoryOut') == inferredCategory:
      numCorrect += 1
    if i%(numTestImages/10)== 0:
      print "Iteration",i,"numCorrect=",numCorrect

  # Some interesting statistics
  print "Testing time:",time.time()-start
  print "Number of test images",numTestImages
  print "num correct=",numCorrect
  print "pct correct=",(100.0*numCorrect) / numTestImages
Example No. 11
  def __init__(self, numColumns, L2Params, L4Params, L6aParams, repeat,
               logCalls=False):
    """
    Create a network consisting of multiple columns. Each column contains one L2,
    one L4 and one L6a layers. In addition all the L2 columns are fully
    connected to each other through their lateral inputs.

    :param numColumns: Number of columns to create
    :type numColumns: int
    :param L2Params: constructor parameters for :class:`ColumnPoolerRegion`
    :type L2Params: dict
    :param L4Params:  constructor parameters for :class:`ApicalTMPairRegion`
    :type L4Params: dict
    :param L6aParams: constructor parameters for :class:`GridCellLocationRegion`
    :type L6aParams: dict
    :param repeat: Number of times each pair should be seen to be learned
    :type repeat: int
    :param logCalls: If true, calls to main functions will be logged internally.
                     The log can then be saved with saveLogs(). This allows us
                     to recreate the complete network behavior using
                     rerunExperimentFromLogfile which is very useful for
                     debugging.
    :type logCalls: bool
    """
    # Handle logging - this has to be done first
    self.logCalls = logCalls

    self.numColumns = numColumns
    self.repeat = repeat

    network = Network()
    self.network = createMultipleL246aLocationColumn(network=network,
                                                     numberOfColumns=self.numColumns,
                                                     L2Params=L2Params,
                                                     L4Params=L4Params,
                                                     L6aParams=L6aParams)
    network.initialize()

    self.sensorInput = []
    self.motorInput = []
    self.L2Regions = []
    self.L4Regions = []
    self.L6aRegions = []
    for i in xrange(self.numColumns):
      col = str(i)
      self.sensorInput.append(network.regions["sensorInput_" + col].getSelf())
      self.motorInput.append(network.regions["motorInput_" + col].getSelf())
      self.L2Regions.append(network.regions["L2_" + col])
      self.L4Regions.append(network.regions["L4_" + col])
      self.L6aRegions.append(network.regions["L6a_" + col])

    if L6aParams is not None and "dimensions" in L6aParams:
      self.dimensions = L6aParams["dimensions"]
    else:
      self.dimensions = 2

    self.sdrSize = L2Params["sdrSize"]

    # will be populated during training
    self.learnedObjects = {}
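The enclosing class is not shown in this snippet, so the name L246aExperiment below is hypothetical; the sketch only illustrates how the constructor documented above might be called, with placeholder parameter dicts.

# Hypothetical class name and placeholder parameters; the real keys are
# dictated by ColumnPoolerRegion, ApicalTMPairRegion and GridCellLocationRegion.
exp = L246aExperiment(numColumns=2,
                      L2Params={"sdrSize": 40},     # "sdrSize" is required by this __init__
                      L4Params={},                  # placeholder
                      L6aParams={"dimensions": 2},  # optional, defaults to 2
                      repeat=3)
print exp.numColumns, exp.dimensions, exp.sdrSize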
Example No. 12
  def testArrays(self):
    arrays = [
      ("real32ArrayParam",
        [0*32, 1*32, 2*32, 3*32, 4*32, 5*32, 6*32, 7*32],
       "Real32"),
      ("int64ArrayParam",
        [0*64, 1*64, 2*64, 3*64],
        "Int64")
    ]

    n = Network()
    l1= n.addRegion("l1", "TestNode", "")

    for paramName, initval, paramtype in arrays:
      x = l1.getParameter(paramName)
      self.assertTrue(isinstance(x, nupic.bindings.engine_internal.Array))
      self.assertEqual(x.getType(), paramtype)
      self.assertEqual(len(x), len(initval))
      for i in xrange(len(x)):
        self.assertEqual(x[i], initval[i])

      for i in xrange(len(x)):
        x[i] = x[i] * 2
      l1.setParameter(paramName, x)

      x = l1.getParameter(paramName)
      self.assertTrue(isinstance(x, nupic.bindings.engine_internal.Array))
      self.assertEqual(x.getType(), paramtype)
      self.assertEqual(len(x), len(initval))
      for i in xrange(len(x)):
        self.assertEqual(x[i], 2 * initval[i])
Example No. 13
def _createNetwork():
  """Create network with one RecordSensor region."""
  network = Network()
  network.addRegion('sensor', 'py.RecordSensor', '{}')
  sensorRegion = network.regions['sensor'].getSelf()

  # Add an encoder.
  encoderParams = {'consumption': {'fieldname': 'consumption',
                                   'resolution': 0.88,
                                   'seed': 1,
                                   'name': 'consumption',
                                   'type': 'RandomDistributedScalarEncoder'}}

  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoderParams)
  sensorRegion.encoder = encoder

  # Add a data source.
  testDir = os.path.dirname(os.path.abspath(__file__))
  inputFile = os.path.join(testDir, 'fixtures', 'gymdata-test.csv')
  dataSource = FileRecordStream(streamID=inputFile)
  sensorRegion.dataSource = dataSource

  # Get and set what field index we want to predict.
  network.regions['sensor'].setParameter('predictedField', 'consumption')

  return network
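A short usage sketch for the helper above; the record count is arbitrary.

network = _createNetwork()
network.run(5)  # streams five rows of gymdata-test.csv through the RecordSensor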
Example No. 14
  def testGetSelf(self):
    # Create network
    net = Network()

    # Add sensor
    sensor = net.addRegion("sensor", "py.ImageSensor",
        "{width: 100, height: 50}")
    pysensor = sensor.getSelf()

    # Verify set parameters
    self.assertEqual(type(pysensor), ImageSensor)
    self.assertEqual(pysensor.height, 50)
    self.assertEqual(pysensor.width, 100)

    self.assertEqual(pysensor.width, sensor.getParameter('width'))
    self.assertEqual(pysensor.height, sensor.getParameter('height'))

    sensor.setParameter('width', 444)
    sensor.setParameter('height', 444)
    self.assertEqual(pysensor.width, 444)
    self.assertEqual(pysensor.height, 444)

    # Verify py object is not a copy
    sensor.getSelf().height = 100
    sensor.getSelf().width = 200
    self.assertEqual(pysensor.height, 100)
    self.assertEqual(pysensor.width, 200)

    pysensor.height = 50
    pysensor.width = 100
    self.assertEqual(sensor.getSelf().height, 50)
    self.assertEqual(sensor.getSelf().width, 100)
Example No. 15
  def testRunPCANode(self):
    from nupic.engine import *

    numpy.random.RandomState(37)

    inputSize = 8

    net = Network()
    net.addRegion('sensor', 'py.ImageSensor' ,
          '{ width: %d, height: %d }' % (inputSize, inputSize))

    params = """{bottomUpCount: %d,
              SVDSampleCount: 5,
              SVDDimCount: 2}""" % inputSize

    pca = net.addRegion('pca', 'py.PCANode', params)

    #nodeAbove = CreateNode("py.ImageSensor", phase=0, categoryOut=1, dataOut=3,
    #                       width=3, height=1)
    #net.addElement('nodeAbove', nodeAbove)

    linkParams = '{ mapping: in, rfSize: [%d, %d] }' % (inputSize, inputSize)
    net.link('sensor', 'pca', 'UniformLink', linkParams, 'dataOut', 'bottomUpIn')

    net.initialize()

    for i in range(10):
      pca.getSelf()._testInputs = numpy.random.random([inputSize])
      net.run(1)
Example No. 16
def testNetwork(testPath, savedNetworkFile="mnist_net.nta"):
  net = Network(savedNetworkFile)
  sensor = net.regions["sensor"]
  sp = net.regions["SP"]
  classifier = net.regions["classifier"]

  print "Reading test images"
  sensor.executeCommand(["loadMultipleImages",testPath])
  numTestImages = sensor.getParameter("numImages")
  print "Number of test images",numTestImages

  start = time.time()

  # Various region parameters
  sensor.setParameter("explorer", yaml.dump(["RandomFlash",
                                             {"replacement": False}]))
  classifier.setParameter("inferenceMode", 1)
  classifier.setParameter("learningMode", 0)
  sp.setParameter("inferenceMode", 1)
  sp.setParameter("learningMode", 0)

  numCorrect = 0
  for i in range(numTestImages):
    net.run(1)
    inferredCategory = classifier.getOutputData("categoriesOut").argmax()
    if sensor.getOutputData("categoryOut") == inferredCategory:
      numCorrect += 1
    if i%(numTestImages/100)== 0:
      print "Iteration",i,"numCorrect=",numCorrect

  # Some interesting statistics
  print "Testing time:",time.time()-start
  print "Number of test images",numTestImages
  print "num correct=",numCorrect
  print "pct correct=",(100.0*numCorrect) / numTestImages
Example No. 17
def createSensorRegion(network, sensorType, encoders, dataSource, numCats):
  """
  Initializes the sensor region with an encoder and data source.

  @param network      (Network)

  @param sensorType   (str)           Specific type of region, e.g.
      "py.RecordSensor"; possible options can be found in /nupic/regions/.

  @param encoders     (dict, encoder) If adding multiple encoders, pass a dict
      as specified in createEncoder() docstring. Otherwise an encoder object is
      expected.

  @param dataSource   (RecordStream)  Sensor region reads data from here.
  
  @param numCats   (int) Maximum number of categories of the input data.

  @return             (Region)        Sensor region of the network.
  """
  # Sensor region may be non-standard, so add custom region class to the network
  sensorName = sensorType.split(".")[1]
  sensorModule = sensorName  # conveniently have the same name
  if sensorName not in PY_REGIONS:
    # Add new region class to the network
    try:
      module = __import__(sensorModule, {}, {}, sensorName)
      sensorClass = getattr(module, sensorName)
      Network.registerRegion(sensorClass)
      # Add region to list of registered PyRegions
      PY_REGIONS.append(sensorName)
    except ImportError:
      raise RuntimeError("Could not find sensor \'{}\' to import.".
                         format(sensorName))

  try:
    # Add region to network
    regionParams = json.dumps({"verbosity": _VERBOSITY,
                               "numCategories": numCats})
    network.addRegion("sensor", sensorType, regionParams)
  except RuntimeError:
    print ("Custom region not added correctly. Possible issues are the spec is "
          "wrong or the region class is not in the Python path.")
    return

  # getSelf() returns the actual region, instead of a region wrapper
  sensorRegion = network.regions["sensor"].getSelf()

  # Specify how the sensor encodes input values
  if isinstance(encoders, dict):
    # Add encoder(s) from params dict:
    sensorRegion.encoder = createEncoder(encoders)
  else:
    sensorRegion.encoder = encoders

  # Specify the dataSource as a file RecordStream instance
  sensorRegion.dataSource = dataSource

  return sensorRegion
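A hedged sketch of calling createSensorRegion() with a standard RecordSensor, reusing the encoder-parameter dict and FileRecordStream pattern from the other examples on this page; the CSV path is a placeholder and createEncoder() is assumed to be the helper referenced in the docstring.

encoderParams = {"consumption": {"fieldname": "consumption",
                                 "resolution": 0.88,
                                 "seed": 1,
                                 "name": "consumption",
                                 "type": "RandomDistributedScalarEncoder"}}
dataSource = FileRecordStream(streamID="gymdata-test.csv")  # placeholder path

network = Network()
sensorRegion = createSensorRegion(network,
                                  sensorType="py.RecordSensor",
                                  encoders=encoderParams,  # dict -> createEncoder()
                                  dataSource=dataSource,
                                  numCats=2)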
Example No. 18
def createNetwork(dataSource):

    network = Network()
    network.addRegion("sensor", "py.RecordSensor",
                      json.dumps({"verbosity": _VERBOSITY}))
    sensor = network.regions["sensor"].getSelf()
    sensor.encoder = createEncoder()
    sensor.dataSource = dataSource

    return network
Example No. 19
def _createLPFNetwork(addSP=True, addTP=False):
    """Create an 'old-style' network ala LPF and return it."""

    # ==========================================================================
    # Create the encoder and data source stuff we need to configure the sensor
    sensorParams = dict(verbosity=_VERBOSITY)
    encoder = _createEncoder()
    trainFile = findDataset("extra/gym/gym.csv")
    dataSource = FileRecordStream(streamID=trainFile)
    dataSource.setAutoRewind(True)

    # Create all the stuff we need to configure the CLARegion
    g_claConfig["spEnable"] = addSP
    g_claConfig["tpEnable"] = addTP
    claParams = _getCLAParams(encoder=encoder, config=g_claConfig)
    claParams["spSeed"] = g_claConfig["spSeed"]
    claParams["tpSeed"] = g_claConfig["tpSeed"]

    # ==========================================================================
    # Now create the network itself
    n = Network()

    n.addRegion("sensor", "py.RecordSensor", json.dumps(sensorParams))

    sensor = n.regions["sensor"].getSelf()
    sensor.encoder = encoder
    sensor.dataSource = dataSource

    n.addRegion("level1", "py.CLARegion", json.dumps(claParams))

    n.link("sensor", "level1", "UniformLink", "")
    n.link("sensor", "level1", "UniformLink", "", srcOutput="resetOut", destInput="resetIn")

    return n
Example No. 20
    def __init__(self, model_params):
        # Init an HTM network
        self.network = Network()

        # Getting parameters for network regions
        self.sensor_params = model_params['Sensor']
        self.spatial_pooler_params = model_params['SpatialPooler']
        self.temporal_memory_params = model_params['TemporalMemory']
        self.classifiers_params = model_params['Classifiers']
        self.encoders_params = model_params['Encoders']

        # Adding regions to HTM network
        self.network.addRegion('DurationEncoder', 'ScalarSensor',
                               json.dumps(self.encoders_params['duration']))
        self.network.addRegion('VelocityEncoder', 'ScalarSensor',
                               json.dumps(self.encoders_params['velocity']))
        self.network.addRegion('PitchEncoder', 'ScalarSensor',
                               json.dumps(self.encoders_params['pitch']))

        self.network.addRegion('SpatialPooler', 'py.SPRegion',
                               json.dumps(self.spatial_pooler_params))
        self.network.addRegion('TemporalMemory', 'py.TMRegion',
                               json.dumps(self.temporal_memory_params))

        # Creating outer classifiers for multifield prediction
        dclp = self.classifiers_params['duration']
        vclp = self.classifiers_params['velocity']
        pclp = self.classifiers_params['pitch']

        self.duration_classifier = SDRClassifier(
            steps=(1, ),
            verbosity=dclp['verbosity'],
            alpha=dclp['alpha'],
            actValueAlpha=dclp['actValueAlpha'])
        self.velocity_classifier = SDRClassifier(
            steps=(1, ),
            verbosity=vclp['verbosity'],
            alpha=vclp['alpha'],
            actValueAlpha=vclp['actValueAlpha'])
        self.pitch_classifier = SDRClassifier(
            steps=(1, ),
            verbosity=pclp['verbosity'],
            alpha=pclp['alpha'],
            actValueAlpha=pclp['actValueAlpha'])

        self._link_all_regions()
        self._enable_learning()
        self._enable_inference()

        self.network.initialize()
Example No. 21
    def testVaryingNumberOfCategories(self):
        # Setup network with sensor; max number of categories = 2
        net = Network()
        sensorRegion = net.addRegion("sensor", "py.RecordSensor",
                                     "{'numCategories': 2}")
        sensor = sensorRegion.getSelf()

        # Test for # of output categories = max
        data = {
            "_timestamp": None,
            "_category": [0, 1],
            "label": "0 1",
            "_sequenceId": 0,
            "y": 2.624902024,
            "x": 0.0,
            "_timestampRecordIdx": None,
            "_reset": 0
        }
        sensorOutput = numpy.array([0, 0], dtype="int32")
        sensor.populateCategoriesOut(data["_category"], sensorOutput)

        self.assertSequenceEqual(
            [0, 1], sensorOutput.tolist(),
            "Sensor failed to populate the array with record of two categories.")

        # Test for # of output categories > max
        data["_category"] = [1, 2, 3]
        sensorOutput = numpy.array([0, 0], dtype="int32")
        sensor.populateCategoriesOut(data["_category"], sensorOutput)

        self.assertSequenceEqual(
            [1, 2], sensorOutput.tolist(),
            "Sensor failed to populate the array w/ record of three categories.")

        # Test for # of output categories < max
        data["_category"] = [3]
        sensorOutput = numpy.array([0, 0], dtype="int32")
        sensor.populateCategoriesOut(data["_category"], sensorOutput)

        self.assertSequenceEqual(
            [3, -1], sensorOutput.tolist(),
            "Sensor failed to populate the array w/ record of one category.")

        # Test for no output categories
        data["_category"] = [None]
        sensorOutput = numpy.array([0, 0], dtype="int32")
        sensor.populateCategoriesOut(data["_category"], sensorOutput)

        self.assertSequenceEqual(
            [-1, -1], sensorOutput.tolist(),
            "Sensor failed to populate the array w/ record of zero categories.")
Example No. 22
    def testParameters(self):
        # Test setting and getting parameters
        net = Network()

        # Add sensor to the network
        sensor = net.addRegion("sensor", "py.ImageSensor",
                               "{width: 100, height: 50}")

        # Verify get parameters
        self.assertEqual(sensor.getParameter('height'), 50)
        self.assertEqual(sensor.getParameter('width'), 100)

        # Verify set parameters
        sensor.setParameter('width', 42)
        self.assertEqual(sensor.getParameter('width'), 42)
Example No. 23
  def testParameters(self):
    # Test setting and getting parameters
    net = Network()

    # Add sensor to the network
    sensor = net.addRegion("sensor", "py.ImageSensor",
        "{width: 100, height: 50}")

    # Verify get parameters
    self.assertEqual(sensor.getParameter('height'), 50)
    self.assertEqual(sensor.getParameter('width'), 100)

    # Verify set parameters
    sensor.setParameter('width', 42)
    self.assertEqual(sensor.getParameter('width'), 42)
Example No. 24
    def _testRepeatCount(self):
        """Test setting and getting repeat count using parameters."""
        # Check default repeat count
        n = Network(self.filename)
        sensor = n.regions[self.nodeName]
        res = sensor.executeCommand(['dump'])
        expected = self.sensorName + \
          ' isLabeled = 0 repeatCount = 1 vectorCount = 0 iterations = 0\n'
        self.assertEqual(
            res, expected,
            "repeat count test:\n   expected '%s'\n   got      '%s'\n" %
            (expected, res))

        # Set to 42, check it and return it back to 1
        sensor.setParameter('repeatCount', 42)

        res = sensor.getParameter('repeatCount')
        self.assertEqual(
            res, 42, "set repeatCount to 42:\n   got back     '%d'\n" % res)

        res = sensor.executeCommand(['dump'])
        expected = (self.sensorName +
                    ' isLabeled = 0 repeatCount = 42 vectorCount = 0 '
                    'iterations = 0\n')
        self.assertEqual(
            res, expected,
            "set to 42 test:\n   expected '%s'\n   got      '%s'\n" %
            (expected, res))
        sensor.setParameter('repeatCount', 1)
Example No. 25
    def testSaveAndReload(self):
        """
    This function tests saving and loading. It will train a network for 500
    iterations, then save it and reload it as a second network instance. It will
    then run both networks for 100 iterations and ensure they return identical
    results.
    """

        print "Creating network..."

        netOPF = _createOPFNetwork()
        level1OPF = netOPF.regions['level1SP']

        # ==========================================================================
        print "Training network for 500 iterations"
        level1OPF.setParameter('learningMode', 1)
        level1OPF.setParameter('inferenceMode', 0)
        netOPF.run(500)
        level1OPF.setParameter('learningMode', 0)
        level1OPF.setParameter('inferenceMode', 1)

        # ==========================================================================
        # Save network and reload as a second instance. We need to reset the data
        # source for the unsaved network so that both instances start at the same
        # place
        print "Saving and reload network"
        _, tmpNetworkFilename = _setupTempDirectory("trained.nta")
        netOPF.save(tmpNetworkFilename)
        netOPF2 = Network(tmpNetworkFilename)
        level1OPF2 = netOPF2.regions['level1SP']

        sensor = netOPF.regions['sensor'].getSelf()
        trainFile = resource_filename("nupic.datafiles", "extra/gym/gym.csv")
        sensor.dataSource = FileRecordStream(streamID=trainFile)
        sensor.dataSource.setAutoRewind(True)

        # ==========================================================================
        print "Running inference on the two networks for 100 iterations"
        for _ in xrange(100):
            netOPF2.run(1)
            netOPF.run(1)
            l1outputOPF2 = level1OPF2.getOutputData("bottomUpOut")
            l1outputOPF = level1OPF.getOutputData("bottomUpOut")
            opfHash2 = l1outputOPF2.nonzero()[0].sum()
            opfHash = l1outputOPF.nonzero()[0].sum()

            self.assertEqual(opfHash2, opfHash)
Example No. 26
  def testSaveAndReload(self):
    """
    This function tests saving and loading. It will train a network for 500
    iterations, then save it and reload it as a second network instance. It will
    then run both networks for 100 iterations and ensure they return identical
    results.
    """

    print "Creating network..."

    netOPF = _createOPFNetwork()
    level1OPF = netOPF.regions['level1SP']

    # ==========================================================================
    print "Training network for 500 iterations"
    level1OPF.setParameter('learningMode', 1)
    level1OPF.setParameter('inferenceMode', 0)
    netOPF.run(500)
    level1OPF.setParameter('learningMode', 0)
    level1OPF.setParameter('inferenceMode', 1)

    # ==========================================================================
    # Save network and reload as a second instance. We need to reset the data
    # source for the unsaved network so that both instances start at the same
    # place
    print "Saving and reload network"
    _, tmpNetworkFilename = _setupTempDirectory("trained.nta")
    netOPF.save(tmpNetworkFilename)
    netOPF2 = Network(tmpNetworkFilename)
    level1OPF2 = netOPF2.regions['level1SP']

    sensor = netOPF.regions['sensor'].getSelf()
    trainFile = resource_filename("nupic.datafiles", "extra/gym/gym.csv")
    sensor.dataSource = FileRecordStream(streamID=trainFile)
    sensor.dataSource.setAutoRewind(True)

    # ==========================================================================
    print "Running inference on the two networks for 100 iterations"
    for _ in xrange(100):
      netOPF2.run(1)
      netOPF.run(1)
      l1outputOPF2 = level1OPF2.getOutputData("bottomUpOut")
      l1outputOPF = level1OPF.getOutputData("bottomUpOut")
      opfHash2 = l1outputOPF2.nonzero()[0].sum()
      opfHash = l1outputOPF.nonzero()[0].sum()

      self.assertEqual(opfHash2, opfHash)
Example No. 27
def createNetwork(dataSource):
    '''
  Create and initialize a network.
  '''

    with open(_PARAMS_PATH, "r") as f:
        modelParams = yaml.safe_load(f)["modelParams"]

    # Create a network that will hold the regions.
    network = Network()

    # Add a sensor region.
    network.addRegion("sensor", "py.RecordSensor", "{}")

    # Set the encoder and data source of the sensor region.
    sensorRegion = network.regions["sensor"].getSelf()
    sensorRegion.encoder = createEncoder(
        modelParams["sensorParams"]["encoders"])
    sensorRegion.dataSource = dataSource

    # Make sure the SP input width matches the sensor region output width.
    modelParams["spParams"]["inputWidth"] = sensorRegion.encoder.getWidth()

    # Add SP and TP regions.
    network.addRegion("SP", "py.SPRegion", json.dumps(modelParams["spParams"]))
    network.addRegion("TM", "py.TMRegion", json.dumps(modelParams["tmParams"]))

    # Add a classifier region.
    clName = "py.%s" % modelParams["clParams"].pop("regionName")
    network.addRegion("classifier", clName,
                      json.dumps(modelParams["clParams"]))

    classifierRegion = network.regions["classifier"].getSelf()

    # Add all links
    createSensorToClassifierLinks(network, "sensor", "classifier")
    createDataOutLink(network, "sensor", "SP")
    createFeedForwardLink(network, "SP", "TM")
    createFeedForwardLink(network, "TM", "classifier")
    # Reset links are optional, since the sensor region does not send resets.
    createResetLink(network, "sensor", "SP")
    createResetLink(network, "sensor", "TM")

    # Make sure all objects are initialized.
    network.initialize()

    return network
Example No. 28
def initModels():
	global model1
	model1.dataSource = NetworkModel.getOrCreateDataSource()
	model1.network = Network("network1.nta")
	#reset the dataSource
	model1.network.regions[_RECORD_SENSOR].dataSource = model1.dataSource
	
	"""
Example No. 29
    def testParameters(self):
        # Test setting and getting parameters
        net = Network()

        # Register ImageSensor region
        Network.registerRegion(ImageSensor)

        # Add sensor to the network
        sensor = net.addRegion("sensor", "py.ImageSensor", "{width: 100, height: 50}")

        # Verify get parameters
        self.assertEqual(sensor.getParameter("height"), 50)
        self.assertEqual(sensor.getParameter("width"), 100)

        # Verify set parameters
        sensor.setParameter("width", 42)
        self.assertEqual(sensor.getParameter("width"), 42)
Example No. 30
def createSensorRegion(network, sensorType, encoders, dataSource):
  """
  Initializes the sensor region with an encoder and data source.

  @param network      (Network)

  @param sensorType   (str)           Specific type of region, e.g.
      "py.RecordSensor"; possible options can be found in /nupic/regions/.

  @param encoders     (dict, encoder) If adding multiple encoders, pass a dict
      as specified in createEncoder() docstring. Otherwise an encoder object is
      expected.

  @param dataSource   (RecordStream)  Sensor region reads data from here.

  @return             (Region)        Sensor region of the network.
  """
  # Sensor region may be non-standard, so add custom region class to the network
  if sensorType.split(".")[1] not in PY_REGIONS:
    # Add new region class to the network
    Network.registerRegion(EEGSensor)

  try:
    # Add region to network
    regionParams = json.dumps({"verbosity": _VERBOSITY})
    network.addRegion("sensor", sensorType, regionParams)
  except RuntimeError:
    print ("Custom region not added correctly. Possible issues are the spec is "
          "wrong or the region class is not in the Python path.")
    return

  # getSelf() returns the actual region, instead of a region wrapper
  sensorRegion = network.regions["sensor"].getSelf()

  # Specify how RecordSensor encodes input values
  if isinstance(encoders, dict):
    # Multiple encoders to add
    sensorRegion.encoder = createEncoder(encoders)
  else:
    sensorRegion.encoder = encoders

  # Specify the dataSource as a file RecordStream instance
  sensorRegion.dataSource = dataSource

  return sensorRegion
Example No. 31
def _registerRegion(regionTypeName, moduleName=None):
    """
  A region may be non-standard, so add custom region class to the network.

  @param regionTypeName: (str) type name of the region. E.g SensorRegion.
  @param moduleName: (str) location of the region class, only needed if
    registering a region that is outside the expected "regions/" dir.
  """
    if moduleName is None:
        # the region is located in the regions/ directory
        moduleName = "htmresearch.regions." + regionTypeName
    if regionTypeName not in _PY_REGIONS:
        # Add new region class to the network.
        module = __import__(moduleName, {}, {}, regionTypeName)
        unregisteredClass = getattr(module, regionTypeName)
        Network.registerRegion(unregisteredClass)
        # Add region to list of registered PyRegions
        _PY_REGIONS.append(regionTypeName)
Example No. 32
def _registerRegion(regionTypeName, moduleName=None):
  """
  A region may be non-standard, so add custom region class to the network.

  @param regionTypeName: (str) type name of the region. E.g SensorRegion.
  @param moduleName: (str) location of the region class, only needed if
    registering a region that is outside the expected "regions/" dir.
  """
  if moduleName is None:
    # the region is located in the regions/ directory
    moduleName = "htmresearch.regions." + regionTypeName
  if regionTypeName not in _PY_REGIONS:
    # Add new region class to the network.
    module = __import__(moduleName, {}, {}, regionTypeName)
    unregisteredClass = getattr(module, regionTypeName)
    Network.registerRegion(unregisteredClass)
    # Add region to list of registered PyRegions
    _PY_REGIONS.append(regionTypeName)
Example No. 33
  def _deSerializeExtraData(self, extraDataDir):
    """
    Protected method that is called during deserialization (after __setstate__)
    with an external directory path. We override it here to load the Network API
    instance.

    @param extraDataDir (string) Model's extra data directory path
    """
    self.network = Network(os.path.join(extraDataDir, "network.nta"))
    self._initializeRegionHelpers()
Example No. 34
def _registerRegion(regionTypeName, moduleName=None):
  """
  A region may be non-standard, so add custom region class to the network.

  @param regionTypeName: (str) type name of the region. E.g SensorRegion.
  """
  if moduleName is None:
    moduleName = regionTypeName
  if regionTypeName not in _PY_REGIONS:
    # Add new region class to the network.
    try:
      module = __import__(moduleName, {}, {}, regionTypeName)
      unregisteredClass = getattr(module, regionTypeName)
      Network.registerRegion(unregisteredClass)
      # Add region to list of registered PyRegions
      _PY_REGIONS.append(regionTypeName)
    except ImportError:
      raise RuntimeError(
        "Could not import sensor \'{}\'.".format(regionTypeName))
Example No. 35
def createNetwork():
    network = Network()

    #
    # Sensors
    #

    # C++
    consumptionSensor = network.addRegion(
        'consumptionSensor', 'ScalarSensor',
        json.dumps({
            'n': 120,
            'w': 21,
            'minValue': 0.0,
            'maxValue': 100.0,
            'clipInput': True
        }))

    return network
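A hedged sketch of driving the C++ ScalarSensor created above; the 'sensedValue' parameter and 'encoded' output names follow the standard NuPIC ScalarSensor spec and are assumptions here, not something shown in this example.

network = createNetwork()
network.initialize()
consumptionSensor = network.regions["consumptionSensor"]

for value in [10.0, 42.5, 93.1]:
    consumptionSensor.setParameter("sensedValue", value)   # assumed parameter name
    network.run(1)
    encoding = consumptionSensor.getOutputData("encoded")   # assumed output name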
Example No. 36
def registerResearchRegion(regionTypeName, moduleName=None):
  """
  Register this region so that NuPIC can later find it.

  @param regionTypeName: (str) type name of the region. E.g LanguageSensor.
  @param moduleName: (str) location of the region class, only needed if
    registering a region that is outside the expected "regions/" dir.
  """
  global _PY_REGIONS

  if moduleName is None:
    # the region is located in the regions/ directory
    moduleName = "htmresearch.regions." + regionTypeName
  if regionTypeName not in _PY_REGIONS:
    # Add new region class to the network.
    module = __import__(moduleName, {}, {}, regionTypeName)
    unregisteredClass = getattr(module, regionTypeName)
    Network.registerRegion(unregisteredClass)
    # Add region to list of registered PyRegions
    _PY_REGIONS.append(regionTypeName)
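Typical usage of registerResearchRegion(), assuming LanguageSensor lives under htmresearch.regions and accepts an empty parameter string.

# Register the research sensor, then add it to a network under the "py." prefix
registerResearchRegion("LanguageSensor")

net = Network()
net.addRegion("sensor", "py.LanguageSensor", "{}")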
Example No. 37
  def loadFromFile(self, filename):
    """ Load a serialized network
    :param filename: Where the network should be loaded from
    """
    print "Loading network from {file}...".format(file=filename)
    Network.unregisterRegion(ImageSensor.__name__)
    Network.registerRegion(ImageSensor)

    self.net = Network(filename)

    self.networkSensor = self.net.regions["sensor"]
    self.networkSensor.setParameter("numSaccades", SACCADES_PER_IMAGE_TESTING)

    self.networkSP = self.net.regions["SP"]
    self.networkClassifier = self.net.regions["classifier"]

    self.setLearningMode(learningSP=False,
                         learningClassifier=False)

    self.numCorrect = 0
Example No. 38
def registerResearchRegion(regionTypeName, moduleName=None):
  """
  Register this region so that NuPIC can later find it.

  @param regionTypeName: (str) type name of the region. E.g LanguageSensor.
  @param moduleName: (str) location of the region class, only needed if
    registering a region that is outside the expected "regions/" dir.
  """
  global _PY_REGIONS

  if moduleName is None:
    # the region is located in the regions/ directory
    moduleName = "htmresearch.regions." + regionTypeName
  if regionTypeName not in _PY_REGIONS:
    # Add new region class to the network.
    module = __import__(moduleName, {}, {}, regionTypeName)
    unregisteredClass = getattr(module, regionTypeName)
    Network.registerRegion(unregisteredClass)
    # Add region to list of registered PyRegions
    _PY_REGIONS.append(regionTypeName)
Example No. 39
  def testVaryingNumberOfCategories(self):
    # Setup network with sensor; max number of categories = 2
    net = Network()
    sensorRegion = net.addRegion(
        "sensor", "py.RecordSensor", "{'numCategories': 2}")
    sensor = sensorRegion.getSelf()

    # Test for # of output categories = max
    data = {"_timestamp": None, "_category": [0, 1], "label": "0 1",
            "_sequenceId": 0, "y": 2.624902024, "x": 0.0,
            "_timestampRecordIdx": None, "_reset": 0}
    sensorOutput = numpy.array([0, 0], dtype="int32")
    sensor.populateCategoriesOut(data["_category"], sensorOutput)
    
    self.assertSequenceEqual([0, 1], sensorOutput.tolist(),
        "Sensor failed to populate the array with record of two categories.")

    # Test for # of output categories > max
    data["_category"] = [1, 2, 3]
    sensorOutput = numpy.array([0, 0], dtype="int32")
    sensor.populateCategoriesOut(data["_category"], sensorOutput)
    
    self.assertSequenceEqual([1, 2], sensorOutput.tolist(),
        "Sensor failed to populate the array w/ record of three categories.")
    
    # Test for # of output categories < max
    data["_category"] = [3]
    sensorOutput = numpy.array([0, 0], dtype="int32")
    sensor.populateCategoriesOut(data["_category"], sensorOutput)
    
    self.assertSequenceEqual([3, -1], sensorOutput.tolist(),
        "Sensor failed to populate the array w/ record of one category.")
    
    # Test for no output categories
    data["_category"] = [None]
    sensorOutput = numpy.array([0, 0], dtype="int32")
    sensor.populateCategoriesOut(data["_category"], sensorOutput)

    self.assertSequenceEqual([-1, -1], sensorOutput.tolist(),
        "Sensor failed to populate the array w/ record of zero categories.")
Example No. 40
def _createNetwork():
  """Create a network with a RecordSensor region and a SDRClassifier region"""

  network = Network()
  network.addRegion('sensor', 'py.RecordSensor', '{}')
  network.addRegion('classifier', 'py.SDRClassifierRegion', '{}')
  _createSensorToClassifierLinks(network, 'sensor', 'classifier')

  # Add encoder to sensor region.
  sensorRegion = network.regions['sensor'].getSelf()
  encoderParams = {'consumption': {'fieldname': 'consumption',
                                   'resolution': 0.88,
                                   'seed': 1,
                                   'name': 'consumption',
                                   'type': 'RandomDistributedScalarEncoder'}}

  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoderParams)
  sensorRegion.encoder = encoder

  # Add data source.
  testDir = os.path.dirname(os.path.abspath(__file__))
  inputFile = os.path.join(testDir, 'fixtures', 'gymdata-test.csv')
  dataSource = FileRecordStream(streamID=inputFile)
  sensorRegion.dataSource = dataSource

  # Get and set what field index we want to predict.
  predictedIdx = dataSource.getFieldNames().index('consumption')
  network.regions['sensor'].setParameter('predictedFieldIdx', predictedIdx)

  return network
Example No. 41
  def testLoadImages(self):
    # Create a simple network with an ImageSensor. You can't actually run
    # the network because the region isn't connected to anything
    net = Network()
    Network.registerRegion(ImageSensor)
    net.addRegion("sensor", "py.ImageSensor", "{width: 32, height: 32}")
    sensor = net.regions['sensor']

    # Create a dataset with two categories, one image in each category
    # Each image consists of a unique rectangle
    tmpDir = tempfile.mkdtemp()
    os.makedirs(os.path.join(tmpDir,'0'))
    os.makedirs(os.path.join(tmpDir,'1'))

    im0 = Image.new("L",(32,32))
    draw = ImageDraw.Draw(im0)
    draw.rectangle((10,10,20,20), outline=255)
    im0.save(os.path.join(tmpDir,'0','im0.png'))

    im1 = Image.new("L",(32,32))
    draw = ImageDraw.Draw(im1)
    draw.rectangle((15,15,25,25), outline=255)
    im1.save(os.path.join(tmpDir,'1','im1.png'))

    # Load the dataset and check we loaded the correct number
    sensor.executeCommand(["loadMultipleImages", tmpDir])
    numImages = sensor.getParameter('numImages')
    self.assertEqual(numImages, 2)

    # Load a single image (this will replace the previous images)
    sensor.executeCommand(["loadSingleImage",
                           os.path.join(tmpDir,'1','im1.png')])
    numImages = sensor.getParameter('numImages')
    self.assertEqual(numImages, 1)

    # Cleanup the temp files
    os.unlink(os.path.join(tmpDir,'0','im0.png'))
    os.unlink(os.path.join(tmpDir,'1','im1.png'))
    os.removedirs(os.path.join(tmpDir,'0'))
    os.removedirs(os.path.join(tmpDir,'1'))
Example No. 42
    def testLoadImages(self):
        # Create a simple network with an ImageSensor. You can't actually run
        # the network because the region isn't connected to anything
        net = Network()
        net.addRegion("sensor", "py.ImageSensor", "{width: 32, height: 32}")
        sensor = net.regions['sensor']

        # Create a dataset with two categories, one image in each category
        # Each image consists of a unique rectangle
        tmpDir = tempfile.mkdtemp()
        os.makedirs(os.path.join(tmpDir, '0'))
        os.makedirs(os.path.join(tmpDir, '1'))

        im0 = Image.new("L", (32, 32))
        draw = ImageDraw.Draw(im0)
        draw.rectangle((10, 10, 20, 20), outline=255)
        im0.save(os.path.join(tmpDir, '0', 'im0.png'))

        im1 = Image.new("L", (32, 32))
        draw = ImageDraw.Draw(im1)
        draw.rectangle((15, 15, 25, 25), outline=255)
        im1.save(os.path.join(tmpDir, '1', 'im1.png'))

        # Load the dataset and check we loaded the correct number
        sensor.executeCommand(["loadMultipleImages", tmpDir])
        numImages = sensor.getParameter('numImages')
        self.assertEqual(numImages, 2)

        # Load a single image (this will replace the previous images)
        sensor.executeCommand(
            ["loadSingleImage",
             os.path.join(tmpDir, '1', 'im1.png')])
        numImages = sensor.getParameter('numImages')
        self.assertEqual(numImages, 1)

        # Cleanup the temp files
        os.unlink(os.path.join(tmpDir, '0', 'im0.png'))
        os.unlink(os.path.join(tmpDir, '1', 'im1.png'))
        os.removedirs(os.path.join(tmpDir, '0'))
        os.removedirs(os.path.join(tmpDir, '1'))
Example No. 43
def plotPermanences(network=None,
                    savedNetworkFile="mnist_net.nta",
                    columnList=None,
                    iteration=0):
    """
  Plots the permanences of the top columns into a single master image
  If columnList is specified, uses those columns otherwise extracts the
  most active columns from the spatial pooler using duty cycle.
  """
    # Get the spatial pooler from the network, otherwise read it from checkpoint.
    if network is None:
        network = Network(savedNetworkFile)
    spRegion = network.regions["SP"]
    spSelf = spRegion.getSelf()
    sp = spSelf._sfdr

    # If we are not given a column list, retrieve columns with highest duty cycles
    dutyCycles = numpy.zeros(sp.getNumColumns(), dtype=GetNTAReal())
    sp.getActiveDutyCycles(dutyCycles)
    if columnList is None:
        mostActiveColumns = list(dutyCycles.argsort())
        mostActiveColumns.reverse()
        columnList = mostActiveColumns[0:400]
        #print columnList

    # Create empty master image with the top 25 columns. We will paste
    # individual column images into this image
    numImagesPerRowInMaster = 20
    masterImage = Image.new("L", ((32 + 2) * numImagesPerRowInMaster,
                                  (32 + 2) * numImagesPerRowInMaster), 255)

    for rank, col in enumerate(columnList):
        #print "Col=",col,"rank=",rank,"dutyCycle=",dutyCycles[col]
        pyPerm = numpy.zeros(sp.getNumInputs(), dtype=GetNTAReal())
        sp.getPermanence(col, pyPerm)

        # Create small image for each column
        pyPerm = pyPerm / pyPerm.max()
        pyPerm = (pyPerm * 255.0)
        pyPerm = pyPerm.reshape((32, 32))
        pyPerm = (pyPerm).astype('uint8')
        img = Image.fromarray(pyPerm)

        # Paste it into master image
        if rank < numImagesPerRowInMaster * numImagesPerRowInMaster:
            x = rank % numImagesPerRowInMaster * (32 + 2)
            y = (rank / numImagesPerRowInMaster) * (32 + 2)
            masterImage.paste(img, (x, y))

    # Save master image
    masterImage.save("master_%05d.png" % (iteration))
Example No. 44
def inspect(element, showRun=True, icon=None):
    """
  Launch an Inspector for the provided element.

  element -- A network, region or a path to a network directory.
  showRun -- Whether to show the RuntimeInspector in the dropdown, which lets
             the user run the network.
  """
    if isinstance(element, basestring):
        element = Network(element)
    else:
        assert isinstance(element, Network)

    if len(element.regions) == 0:
        raise Exception("Unable to inspect an empty network")

    # Network must be initialized before it can be inspected
    element.initialize()

    from wx import GetApp, PySimpleApp

    if GetApp():
        useApp = True
    else:
        useApp = False

    from nupic.analysis.inspectors.MultiInspector import MultiInspector

    if not useApp:
        app = PySimpleApp()

    inspector = MultiInspector(element=element, showRun=showRun, icon=icon)

    if not useApp:
        app.MainLoop()
        app.Destroy()
    else:
        return inspector
Example No. 45
def inspect(element, showRun=True, icon=None):
    """
  Launch an Inspector for the provided element.

  element -- A network, region or a path to a network directory.
  showRun -- Whether to show the RuntimeInspector in the dropdown, which lets
             the user run the network.
  """
    if isinstance(element, basestring):
        element = Network(element)
    else:
        assert isinstance(element, Network)

    if len(element.regions) == 0:
        raise Exception('Unable to inspect an empty network')

    # Network must be initialized before it can be inspected
    element.initialize()

    from wx import GetApp, PySimpleApp

    if GetApp():
        useApp = True
    else:
        useApp = False

    from nupic.analysis.inspectors.MultiInspector import MultiInspector

    if not useApp:
        app = PySimpleApp()

    inspector = MultiInspector(element=element, showRun=showRun, icon=icon)

    if not useApp:
        app.MainLoop()
        app.Destroy()
    else:
        return inspector
Example No. 46
  def _testNetLoad(self):
    """Test loading a network with this sensor in it."""
    n = Network()
    r = n.addRegion(self.nodeName, self.sensorName, '{ activeOutputCount: 11}')
    r.dimensions = Dimensions([1])
    n.save(self.filename)

    n = Network(self.filename)
    n.initialize()
    self.testsPassed += 1

    # Check that vectorCount parameter is zero
    r = n.regions[self.nodeName]

    res = r.getParameter('vectorCount')
    self.assertEqual(
        res, 0, "getting vectorCount:\n Expected '0',  got back  '%d'\n" % res)

    self.sensor = r
Example No. 47
def createNetwork(dataSource):
  """Create and initialize a network."""
  with open(_PARAMS_PATH, "r") as f:
    modelParams = yaml.safe_load(f)["modelParams"]

  # Create a network that will hold the regions.
  network = Network()

  # Add a sensor region.
  network.addRegion("sensor", "py.RecordSensor", '{}')

  # Set the encoder and data source of the sensor region.
  sensorRegion = network.regions["sensor"].getSelf()
  sensorRegion.encoder = createEncoder(modelParams["sensorParams"]["encoders"])
  sensorRegion.dataSource = dataSource

  # Make sure the SP input width matches the sensor region output width.
  modelParams["spParams"]["inputWidth"] = sensorRegion.encoder.getWidth()

  # Add SP and TM regions.
  network.addRegion("SP", "py.SPRegion", json.dumps(modelParams["spParams"]))
  network.addRegion("TM", "py.TMRegion", json.dumps(modelParams["tmParams"]))

  # Add a classifier region.
  clName = "py.%s" % modelParams["clParams"].pop("regionName")
  network.addRegion("classifier", clName, json.dumps(modelParams["clParams"]))

  # Add all links
  createSensorToClassifierLinks(network, "sensor", "classifier")
  createDataOutLink(network, "sensor", "SP")
  createFeedForwardLink(network, "SP", "TM")
  createFeedForwardLink(network, "TM", "classifier")
  # Reset links are optional, since the sensor region does not send resets.
  createResetLink(network, "sensor", "SP")
  createResetLink(network, "sensor", "TM")

  # Make sure all objects are initialized.
  network.initialize()

  return network
Example No. 48
def createNetwork(networkConfig):
    """
  Create and initialize the specified network instance.

  @param networkConfig: (dict) the configuration of this network.
  @return network: (Network) The actual network
  """

    registerAllResearchRegions()

    network = Network()

    if networkConfig["networkType"] == "L4L2Column":
        return createL4L2Column(network, networkConfig, "_0")
    elif networkConfig["networkType"] == "MultipleL4L2Columns":
        return createMultipleL4L2Columns(network, networkConfig)
Ejemplo n.º 49
0
  def loadFromFile(self, filename):
    """ Load a serialized network
    :param filename: Where the network should be loaded from
    """
    print "Loading network from {file}...".format(file=filename)
    Network.unregisterRegion(SaccadeSensor.__name__)
    Network.registerRegion(SaccadeSensor)

    Network.registerRegion(ExtendedTMRegion)

    self.net = Network(filename)

    self.networkSensor = self.net.regions["sensor"]
    self.networkSensor.setParameter("numSaccades", SACCADES_PER_IMAGE_TESTING)

    self.networkSP = self.net.regions["SP"]
    self.networkClassifier = self.net.regions["classifier"]

    self.numCorrect = 0
  def testOverlap(self):
    """Create a simple network to test the region."""

    rawParams = {"outputWidth": 8 * 2048}
    net = Network()
    rawSensor = net.addRegion("raw", "py.RawSensor", json.dumps(rawParams))
    l2c = net.addRegion("L2", "py.ColumnPoolerRegion", "")
    net.link("raw", "L2", "UniformLink", "")

    self.assertEqual(rawSensor.getParameter("outputWidth"),
                     l2c.getParameter("inputWidth"),
                     "Incorrect outputWidth parameter")

    rawSensorPy = rawSensor.getSelf()
    rawSensorPy.addDataToQueue([2, 4, 6], 0, 42)
    rawSensorPy.addDataToQueue([2, 42, 1023], 1, 43)
    rawSensorPy.addDataToQueue([18, 19, 20], 0, 44)

    # Run the network and check outputs are as expected
    net.run(3)
Ejemplo n.º 51
0
  def _testNetLoad(self):
    """Test loading a network with this sensor in it."""
    n = Network()
    r = n.addRegion(self.nodeName, self.sensorName, '{ activeOutputCount: 11}')
    r.dimensions = Dimensions([1])
    n.save(self.filename)

    n = Network(self.filename)
    n.initialize()
    self.testsPassed += 1

    # Check that vectorCount parameter is zero
    r = n.regions[self.nodeName]

    res = r.getParameter('vectorCount')
    self.assertEqual(
        res, 0, "getting vectorCount:\n Expected '0',  got back  '%d'\n" % res)

    self.sensor = r
Ejemplo n.º 52
0
def saveAndLoadNetwork(network):
  # Save network
  proto1 = NetworkProto_capnp.NetworkProto.new_message()
  network.write(proto1)

  with tempfile.TemporaryFile() as f:
    proto1.write(f)
    f.seek(0)

    # Load network
    proto2 = NetworkProto_capnp.NetworkProto.read(f)
    loadedNetwork = Network.read(proto2)

    # Set loaded network's datasource
    sensor = network.regions["sensor"].getSelf()
    loadedSensor = loadedNetwork.regions["sensor"].getSelf()
    loadedSensor.dataSource = sensor.dataSource

    # Initialize loaded network
    loadedNetwork.initialize()

  return loadedNetwork
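A round-trip sketch, assuming a network with a "sensor" region (for example one built by the createNetwork helper above) and a NuPIC build with Cap'n Proto serialization enabled.

# Round-trip sketch: hypothetical CSV path; any network with a "sensor"
# region works here.
from nupic.data.file_record_stream import FileRecordStream

dataSource = FileRecordStream(streamID="my_data.csv")
network = createNetwork(dataSource)   # helper shown earlier
network.run(100)                      # let the regions learn for a while

loadedNetwork = saveAndLoadNetwork(network)
loadedNetwork.run(1)   # the restored network continues from the shared data source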
Ejemplo n.º 53
0
  def testNetworkCreate(self):
    """Create a simple network to test the region."""

    rawParams = {"outputWidth": 16*2048}
    net = Network()
    rawSensor = net.addRegion("raw","py.RawSensor", json.dumps(rawParams))
    l2c = net.addRegion("L2", "py.L2Column", "")
    net.link("raw", "L2", "UniformLink", "")

    self.assertEqual(rawSensor.getParameter("outputWidth"),
                     l2c.getParameter("inputWidth"),
                     "Incorrect outputWidth parameter")

    rawSensorPy = rawSensor.getSelf()
    rawSensorPy.addDataToQueue([2, 4, 6], 0, 42)
    rawSensorPy.addDataToQueue([2, 42, 1023], 1, 43)
    rawSensorPy.addDataToQueue([18, 19, 20], 0, 44)

    # Run the network and check outputs are as expected
    net.run(3)
Ejemplo n.º 54
0
def saveAndLoadNetwork(network):
    # Save network
    proto1 = NetworkProto_capnp.NetworkProto.new_message()
    network.write(proto1)

    with tempfile.TemporaryFile() as f:
        proto1.write(f)
        f.seek(0)

        # Load network
        proto2 = NetworkProto_capnp.NetworkProto.read(f)
        loadedNetwork = Network.read(proto2)

        # Set loaded network's datasource
        sensor = network.regions["sensor"].getSelf()
        loadedSensor = loadedNetwork.regions["sensor"].getSelf()
        loadedSensor.dataSource = sensor.dataSource

        # Initialize loaded network
        loadedNetwork.initialize()

    return loadedNetwork
Ejemplo n.º 55
0

  def getOutputElementCount(self, outputName):
    """Returns the width of dataOut."""

    # Check if classifier has a 'maxCategoryCount' attribute
    if not hasattr(self, "maxCategoryCount"):
      # Large default value for backward compatibility 
      self.maxCategoryCount = 1000

    if outputName == "categoriesOut":
      return len(self.stepsList)
    elif outputName == "probabilities":
      return len(self.stepsList) * self.maxCategoryCount
    elif outputName == "actualValues":
      return self.maxCategoryCount
    else:
      raise ValueError("Unknown output {}.".format(outputName))



if __name__ == "__main__":
  from nupic.engine import Network

  n = Network()
  classifier = n.addRegion(
    'classifier',
    'py.CLAClassifierRegion',
    '{ steps: "1,2", maxAge: 1000}'
  )
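  # Note: this only constructs the region.  In practice the classifier is
  # driven through getSelf().customCompute(recordNum, patternNZ,
  # classification) once the network is initialized and running, as the
  # ClaClassifier example further down in this file illustrates.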
Ejemplo n.º 56
0
def createNetwork():
  network = Network()

  #
  # Sensors
  #

  # C++
  consumptionSensor = network.addRegion('consumptionSensor', 'ScalarSensor',
                                        json.dumps({'n': 120,
                                                    'w': 21,
                                                    'minValue': 0.0,
                                                    'maxValue': 100.0,
                                                    'clipInput': True}))

  # Python
  timestampSensor = network.addRegion("timestampSensor",
                                      'py.PluggableEncoderSensor', "")
  timestampSensor.getSelf().encoder = DateEncoder(timeOfDay=(21, 9.5),
                                                  name="timestamp_timeOfDay")

  #
  # Add a SPRegion, a region containing a spatial pooler
  #
  consumptionEncoderN = consumptionSensor.getParameter('n')
  timestampEncoderN = timestampSensor.getSelf().encoder.getWidth()
  inputWidth = consumptionEncoderN + timestampEncoderN

  network.addRegion("sp", "py.SPRegion",
                    json.dumps({
                      "spatialImp": "cpp",
                      "globalInhibition": 1,
                      "columnCount": 2048,
                      "inputWidth": inputWidth,
                      "numActiveColumnsPerInhArea": 40,
                      "seed": 1956,
                      "potentialPct": 0.8,
                      "synPermConnected": 0.1,
                      "synPermActiveInc": 0.0001,
                      "synPermInactiveDec": 0.0005,
                      "maxBoost": 1.0,
                    }))

  #
  # Input to the Spatial Pooler
  #
  network.link("consumptionSensor", "sp", "UniformLink", "")
  network.link("timestampSensor", "sp", "UniformLink", "")

  #
  # Add a TPRegion, a region containing a Temporal Memory
  #
  network.addRegion("tm", "py.TPRegion",
                    json.dumps({
                      "columnCount": 2048,
                      "cellsPerColumn": 32,
                      "inputWidth": 2048,
                      "seed": 1960,
                      "temporalImp": "cpp",
                      "newSynapseCount": 20,
                      "maxSynapsesPerSegment": 32,
                      "maxSegmentsPerCell": 128,
                      "initialPerm": 0.21,
                      "permanenceInc": 0.1,
                      "permanenceDec": 0.1,
                      "globalDecay": 0.0,
                      "maxAge": 0,
                      "minThreshold": 9,
                      "activationThreshold": 12,
                      "outputType": "normal",
                      "pamLength": 3,
                    }))

  network.link("sp", "tm", "UniformLink", "")
  network.link("tm", "sp", "UniformLink", "", srcOutput="topDownOut",
               destInput="topDownIn")

  # Enable anomalyMode so the tm calculates anomaly scores
  network.regions['tm'].setParameter("anomalyMode", True)
  # Enable inference mode to be able to get predictions
  network.regions['tm'].setParameter("inferenceMode", True)

  return network
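A sketch of feeding this network one record at a time: the 'sensedValue' parameter on ScalarSensor and setSensedValue() on PluggableEncoderSensor follow NuPIC's network-API example, so treat those names as assumptions if your version differs; the data values are made up.

# Feeding sketch: parameter/method names as in NuPIC's network-API example;
# the record below is made up.
import datetime

network = createNetwork()
network.initialize()

record = (datetime.datetime(2010, 7, 2, 0, 0), 21.2)   # (timestamp, consumption)

network.regions['consumptionSensor'].setParameter('sensedValue', record[1])
network.regions['timestampSensor'].getSelf().setSensedValue(record[0])
network.run(1)

# Active columns coming out of the spatial pooler for this record:
activeColumns = network.regions['sp'].getOutputData('bottomUpOut').nonzero()[0]
print "active columns:", activeColumns[:20]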
class ClaClassifier(object):

    def __init__(self, net_structure, sensor_params, dest_region_params, class_encoder_params):

        self.run_number = 0

        # for classifier
        self.classifier_encoder_list = {}
        self.classifier_input_list   = {}
        self.prevPredictedColumns    = {}

        # TODO: parameter we would like to remove
        self.predict_value = class_encoder_params.keys()[0]
        self.predict_step  = 0


        # default param
        self.default_params = {
            'SP_PARAMS':  {
                "spVerbosity": 0,
                "spatialImp": "cpp",
                "globalInhibition": 1,
                "columnCount": 2024,
                "inputWidth": 0,             # set later
                "numActiveColumnsPerInhArea": 20,
                "seed": 1956,
                "potentialPct": 0.8,
                "synPermConnected": 0.1,
                "synPermActiveInc": 0.05,
                "synPermInactiveDec": 0.0005,
                "maxBoost": 2.0,
                },
            'TP_PARAMS': {
                "verbosity": 0,
                "columnCount": 2024,
                "cellsPerColumn": 32,
                "inputWidth": 2024,
                "seed": 1960,
                "temporalImp": "cpp",
                "newSynapseCount": 20,
                "maxSynapsesPerSegment": 32,
                "maxSegmentsPerCell": 128,
                "initialPerm": 0.21,
                "permanenceInc": 0.2,
                "permanenceDec": 0.1,
                "globalDecay": 0.0,
                "maxAge": 0,
                "minThreshold": 12,
                "activationThreshold": 16,
                "outputType": "normal",
                "pamLength": 1,
                },
            'CLASSIFIER_PARAMS':  {
                "clVerbosity": 0,
                "alpha": 0.005,
                "steps": "0"
                }
            }

        # tp
        self.tp_enable = True

        # net structure, e.g. OrderedDict([('sensor3', ['region1']), ('region1', ['region2'])])
        self.net_structure = net_structure

        # region change params
        self.dest_region_params = dest_region_params

        # sensor change params
        self.sensor_params = sensor_params

        self.class_encoder_params = class_encoder_params

        self._createNetwork()


    def _makeRegion(self, name, params):
        sp_name    = "sp_" + name
        if self.tp_enable:
            tp_name    = "tp_" + name
        class_name = "class_" + name

        # addRegion
        self.network.addRegion(sp_name, "py.SPRegion", json.dumps(params['SP_PARAMS']))
        if self.tp_enable:
            self.network.addRegion(tp_name, "py.TPRegion", json.dumps(params['TP_PARAMS']))
        self.network.addRegion( class_name, "py.CLAClassifierRegion", json.dumps(params['CLASSIFIER_PARAMS']))

        encoder = MultiEncoder()
        encoder.addMultipleEncoders(self.class_encoder_params)
        self.classifier_encoder_list[class_name]  = encoder
        if self.tp_enable:
            self.classifier_input_list[class_name]    = tp_name
        else:
            self.classifier_input_list[class_name]    = sp_name

    def _linkRegion(self, src_name, dest_name):
        sensor     =  src_name
        sp_name    = "sp_" + dest_name
        tp_name    = "tp_" + dest_name
        class_name = "class_" + dest_name

        if self.tp_enable:
            self.network.link(sensor, sp_name, "UniformLink", "")
            self.network.link(sp_name, tp_name, "UniformLink", "")
            self.network.link(tp_name, class_name, "UniformLink", "")
        else:
            self.network.link(sensor, sp_name, "UniformLink", "")
            self.network.link(sp_name, class_name, "UniformLink", "")


    def _initRegion(self, name):
        sp_name = "sp_"+ name
        tp_name = "tp_"+ name
        class_name = "class_"+ name

        # setting sp
        SP = self.network.regions[sp_name]
        SP.setParameter("learningMode", True)
        SP.setParameter("anomalyMode", True)

        # # setting tp
        if self.tp_enable:
            TP = self.network.regions[tp_name]
            TP.setParameter("topDownMode", False)
            TP.setParameter("learningMode", True)
            TP.setParameter("inferenceMode", True)
            TP.setParameter("anomalyMode", False)

        # set up the classifier region
        classifier = self.network.regions[class_name]
        classifier.setParameter('inferenceMode', True)
        classifier.setParameter('learningMode', True)


    def _createNetwork(self):

        def deepupdate(original, update):
            """
            Recursively update a dict.
            Subdict's won't be overwritten but also updated.
            """
            if update is None:
                return None
            for key, value in original.iteritems():
                if not key in update:
                    update[key] = value
                elif isinstance(value, dict):
                    deepupdate(value, update[key])
            return update


        self.network = Network()

        # check
        # if self.selectivity not in self.dest_region_params.keys():
        #     raise Exception, "There is no selected region : " + self.selectivity
        if len(self.net_structure.keys()) != len(set(self.net_structure.keys())):
            raise Exception("There are duplicated net_structure keys: " +
                            str(self.net_structure.keys()))

        # sensor
        for sensor_name, params in self.sensor_params.items():
            self.network.addRegion(sensor_name, "py.RecordSensor", json.dumps({"verbosity": 0}))
            sensor = self.network.regions[sensor_name].getSelf()

            # set encoder
            #params = deepupdate(cn.SENSOR_PARAMS, params)
            encoder = MultiEncoder()
            encoder.addMultipleEncoders( params )
            sensor.encoder         = encoder
            sensor.dataSource      = DataBuffer()


        # network
        print 'create element ...'
        for name in self.dest_region_params.keys():
            change_params = self.dest_region_params[name]
            params = deepupdate(self.default_params, change_params)
            # input width
            input_width = 0
            for source in [s for s,d in self.net_structure.items() if name in d]:
                if source in self.sensor_params.keys():
                    sensor = self.network.regions[source].getSelf()
                    input_width += sensor.encoder.getWidth()
                else:
                    input_width += params['TP_PARAMS']['cellsPerColumn'] * params['TP_PARAMS']['columnCount']

            params['SP_PARAMS']['inputWidth'] = input_width
            self._makeRegion(name, params)

        # link
        print 'link network ...'
        for source, dest_list in self.net_structure.items():
            for dest in dest_list:
                if source in self.sensor_params.keys():
                    self._linkRegion(source, dest)
                else:
                    if self.tp_enable:
                        self._linkRegion("tp_" + source, dest)
                    else:
                        self._linkRegion("sp_" + source, dest)

        # initialize
        print 'initializing network ...'
        self.network.initialize()
        for name in self.dest_region_params.keys():
            self._initRegion(name)

        return


    #@profile
    def run(self, input_data, learn=True, class_learn=True, learn_layer=None):
        """
        Run the network for one record.
        To train, pass learn=True and specify ftype.
        To predict only, pass learn=False and ftype=None.
        (Predictions are still produced while learning.)

        input_data = {'xy_value': [1.0, 2.0], 'ftype': 'sin'}
        """

        self.enable_learning_mode(learn, learn_layer)
        self.enable_class_learning_mode(class_learn)

        self.run_number += 1

        # calc encoder, SP, TP
        for sensor_name in self.sensor_params.keys():
            self.network.regions[sensor_name].getSelf().dataSource.push(input_data)
        self.network.run(1)
        #self.layer_output(input_data)
        #self.debug(input_data)


        # learn classifier
        inferences = {}
        for name in self.dest_region_params.keys():
            class_name = "class_" + name
            inferences['classifier_'+name]   = self._learn_classifier_multi(class_name, actValue=input_data[self.predict_value], pstep=self.predict_step)



        # anomaly
        #inferences["anomaly"] = self._calc_anomaly()

        return inferences


    def _learn_classifier_multi(self, region_name, actValue=None, pstep=0):
        """
        classifierの計算を行う.

        直接customComputeを呼び出さずに, network.runの中でやりたいところだけど,
        計算した内容の取り出し方法がわからない.
        """

        # TODO: networkとclassifierを完全に切り分けたいな.
        #       networkでは, sensor,sp,tpまで計算を行う.
        #       その計算結果の評価/利用は外に出す.

        classifier     = self.network.regions[region_name]
        encoder        = self.classifier_encoder_list[region_name].getEncoderList()[0]
        class_input    = self.classifier_input_list[region_name]
        tp_bottomUpOut = self.network.regions[class_input].getOutputData("bottomUpOut").nonzero()[0]
        #tp_bottomUpOut = self.network.regions["TP"].getSelf()._tfdr.infActiveState['t'].reshape(-1).nonzero()[0]

        if actValue is not None:
            bucketIdx = encoder.getBucketIndices(actValue)[0]
            classificationIn = {
                    'bucketIdx': bucketIdx,
                    'actValue': actValue
                    }
        else:
            classificationIn = {'bucketIdx': 0,'actValue': 'no'}
        clResults = classifier.getSelf().customCompute(
                recordNum=self.run_number,
                patternNZ=tp_bottomUpOut,
                classification=classificationIn
                )

        inferences = self._get_inferences(clResults, pstep, summary_type='sum')

        return inferences

    def _get_inferences(self, clResults, steps, summary_type='sum'):
        """
        Reshape the classifier results into a more convenient form.
        """

        likelihoodsVec = clResults[steps]
        bucketValues   = clResults['actualValues']

        likelihoodsDict = defaultdict(int)
        bestActValue = None
        bestProb = None

        if summary_type == 'sum':
            for (actValue, prob) in zip(bucketValues, likelihoodsVec):
                likelihoodsDict[actValue] += prob
                if bestProb is None or likelihoodsDict[actValue] > bestProb:
                    bestProb = likelihoodsDict[actValue]
                    bestActValue = actValue

        elif summary_type == 'best':
            for (actValue, prob) in zip(bucketValues, likelihoodsVec):
                if bestProb is None or prob > bestProb:
                    likelihoodsDict[actValue] = prob
                    bestProb = prob
                    bestActValue = actValue

        return {'likelihoodsDict': likelihoodsDict, 'best': {'value': bestActValue, 'prob':bestProb}}


    def _calc_anomaly(self):
        """
        各層のanomalyを計算
        """

        score = 0
        anomalyScore = {}
        for name in self.dest_region_params.keys():
            #sp_bottomUpOut = self.network.regions["sp_"+name].getOutputData("bottomUpOut").nonzero()[0]
            sp_bottomUpOut = self.network.regions["tp_"+name].getInputData("bottomUpIn").nonzero()[0]

            if name in self.prevPredictedColumns:
                score = computeAnomalyScore(sp_bottomUpOut, self.prevPredictedColumns[name])
            #topdown_predict = self.network.regions["TP"].getSelf()._tfdr.topDownCompute().copy().nonzero()[0]
            topdown_predict = self.network.regions["tp_"+name].getSelf()._tfdr.topDownCompute().nonzero()[0]
            self.prevPredictedColumns[name] = copy.deepcopy(topdown_predict)

            anomalyScore[name] = score

        return anomalyScore

    def reset(self):
        """
        reset sequence
        """
        # for name in self.dest_region_params.keys():
        #     self.network.regions["tp_"+name].getSelf().resetSequenceStates()
        return

        # for sensor_name in self.sensor_params.keys():
        #     sensor = self.network.regions[sensor_name].getSelf()
        #     sensor.dataSource = DataBuffer()

    def enable_class_learning_mode(self, enable):
        for name in self.dest_region_params.keys():
            self.network.regions["class_"+name].setParameter("learningMode", enable)

    def enable_learning_mode(self, enable, layer_name=None):
        """
        Change the learningMode of the SP, TP and Classifier in each layer.
        """
        if layer_name is None:
            for name in self.dest_region_params.keys():
                self.network.regions["sp_"+name].setParameter("learningMode", enable)
                if self.tp_enable:
                    self.network.regions["tp_"+name].setParameter("learningMode", enable)
                self.network.regions["class_"+name].setParameter("learningMode", enable)
        else:
            for name in self.dest_region_params.keys():
                self.network.regions["sp_"+name].setParameter("learningMode", not enable)
                if self.tp_enable:
                    self.network.regions["tp_"+name].setParameter("learningMode", not enable)
                self.network.regions["class_"+name].setParameter("learningMode", not enable)
            for name in layer_name:
                self.network.regions["sp_"+name].setParameter("learningMode", enable)
                if self.tp_enable:
                    self.network.regions["tp_"+name].setParameter("learningMode", enable)
                self.network.regions["class_"+name].setParameter("learningMode", enable)


    def print_inferences(self, input_data, inferences):
        """
        計算結果を出力する
        """

        # print "%10s, %10s, %1s" % (
        #         int(input_data['xy_value'][0]),
        #         int(input_data['xy_value'][1]),
        #         input_data['label'][:1]),
        print "%5s" % (
                input_data['label']),

        try:
            for name in sorted(self.dest_region_params.keys()):
                print "%5s" % (inferences['classifier_'+name]['best']['value']),

            for name in sorted(self.dest_region_params.keys()):
                print "%6.4f," % (inferences['classifier_'+name]['likelihoodsDict'][input_data[self.predict_value]]),
        except Exception:
            pass

        # for name in sorted(self.dest_region_params.keys()):
        #     print "%3.2f," % (inferences["anomaly"][name]),

        # for name in sorted(self.dest_region_params.keys()):
        #     print "%5s," % name,

        print

    def layer_output(self, input_data, region_name=None):
        if region_name is not None:
            Region = self.network.regions[region_name]
            print Region.getOutputData("bottomUpOut").nonzero()[0]
            return

        for name in self.dest_region_params.keys():
            SPRegion = self.network.regions["sp_"+name]
            if self.tp_enable:
                TPRegion = self.network.regions["tp_"+name]

            print "#################################### ", name
            print
            print "==== SP layer ===="
            print "input:  ", SPRegion.getInputData("bottomUpIn").nonzero()[0][:20]
            print "output: ", SPRegion.getOutputData("bottomUpOut").nonzero()[0][:20]
            print
            if self.tp_enable:
                print "==== TP layer ===="
                print "input:  ", TPRegion.getInputData("bottomUpIn").nonzero()[0][:20]
                print "output: ", TPRegion.getOutputData("bottomUpOut").nonzero()[0][:20]
                print
            print "==== Predict ===="
            print TPRegion.getSelf()._tfdr.topDownCompute().copy().nonzero()[0][:20]
            print

    def save(self, path):
        import pickle
        with open(path, 'wb') as modelPickleFile:
            pickle.dump(self, modelPickleFile)
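A rough usage sketch for ClaClassifier: the encoder specs, field names, region names and record values below are hypothetical, and DataBuffer is assumed to adapt pushed record dicts for RecordSensor as in the original project.

# Hypothetical usage sketch; none of these values come from the original project.
from collections import OrderedDict

net_structure = OrderedDict([('sensor1', ['region1'])])

scalar_spec = {
    'fieldname': 'xy_value', 'name': 'xy_value', 'type': 'ScalarEncoder',
    'n': 100, 'w': 21, 'minval': 0.0, 'maxval': 100.0, 'clipInput': True,
}
sensor_params = {'sensor1': {'xy_value': scalar_spec}}
class_encoder_params = {'xy_value': scalar_spec}

dest_region_params = {'region1': {}}    # keep the default SP/TP/classifier params

model = ClaClassifier(net_structure, sensor_params,
                      dest_region_params, class_encoder_params)

for i in range(100):
    input_data = {'xy_value': float(i % 10), 'label': 'sin'}
    inferences = model.run(input_data, learn=True)
    model.print_inferences(input_data, inferences)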