Example #1
def _createNetwork():
    """Create a network with a RecordSensor region and a SDRClassifier region"""

    network = Network()
    network.addRegion('sensor', 'py.RecordSensor', '{}')
    network.addRegion('classifier', 'py.SDRClassifierRegion', '{}')
    _createSensorToClassifierLinks(network, 'sensor', 'classifier')

    # Add encoder to sensor region.
    sensorRegion = network.regions['sensor'].getSelf()
    encoderParams = {
        'consumption': {
            'fieldname': 'consumption',
            'resolution': 0.88,
            'seed': 1,
            'name': 'consumption',
            'type': 'RandomDistributedScalarEncoder'
        }
    }

    encoder = MultiEncoder()
    encoder.addMultipleEncoders(encoderParams)
    sensorRegion.encoder = encoder

    # Add data source.
    testDir = os.path.dirname(os.path.abspath(__file__))
    inputFile = os.path.join(testDir, 'fixtures', 'gymdata-test.csv')
    dataSource = FileRecordStream(streamID=inputFile)
    sensorRegion.dataSource = dataSource

    # Get and set what field index we want to predict.
    network.regions['sensor'].setParameter('predictedField', 'consumption')

    return network
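A minimal usage sketch (not part of the original example): once built, the network is initialized and stepped through the sensor's data source via the standard nupic.engine API.

network = _createNetwork()
network.initialize()
network.run(10)  # feed ten records from the sensor's FileRecordStream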
Example #2
def createEncoder():
    """
    Creates and returns a #MultiEncoder including a ScalarEncoder for
    energy consumption and a DateEncoder for the time of day.

    @see nupic/encoders/__init__.py for type to file-name mapping
    @see nupic/encoders for encoder source files
    """
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        "consumption": {
            "fieldname": u"consumption",
            "type": "ScalarEncoder",
            "name": u"consumption",
            "minval": 0.0,
            "maxval": 100.0,
            "clipInput": True,
            "w": 21,
            "n": 500
        },
        "timestamp_timeOfDay": {
            "fieldname": u"timestamp",
            "type": "DateEncoder",
            "name": u"timestamp_timeOfDay",
            "timeOfDay": (21, 9.5)
        }
    })
    return encoder
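Hypothetical usage of the encoder above: MultiEncoder.encode() takes a dict keyed by field name and returns the concatenated bit array.

import datetime

encoder = createEncoder()
sdr = encoder.encode({"consumption": 42.0,
                      "timestamp": datetime.datetime(2010, 7, 2, 14, 30)})
print sdr.sum(), "active bits out of", encoder.getWidth()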
Example #3
class RDSEEncoder(object):
    def __init__(self, resolution=.5):
        """Create the encoder instance for our test and return it."""
        self.resolution = resolution
        self.series_encoder = RandomDistributedScalarEncoder(
            self.resolution, name="RDSE-(res={})".format(self.resolution))
        self.encoder = MultiEncoder()
        self.encoder.addEncoder("series", self.series_encoder)
        self.last_m_encode = numpy.zeros(1)

    def get_encoder(self):
        return self.encoder

    def get_resolution(self):
        return self.resolution

    def m_encode(self, inputData):
        self.last_m_encode = self.encoder.encode(inputData)
        return self.last_m_encode

    def m_overlap(self, inputData):
        temp = self.last_m_encode
        self.last_m_encode = self.encoder.encode(inputData)
        return numpy.sum(numpy.multiply(self.last_m_encode, temp))

    def r_encode(self, inputData):
        return self.series_encoder.encode(inputData)

    def r_overlap(self, inputA, inputB):
        return numpy.sum(
            numpy.multiply(self.series_encoder.encode(inputA),
                           self.series_encoder.encode(inputB)))
Example #4
def _createNetwork():
  """Create a network with one RecordSensor region."""
  network = Network()
  network.addRegion('sensor', 'py.RecordSensor', '{}')
  sensorRegion = network.regions['sensor'].getSelf()

  # Add an encoder.
  encoderParams = {'consumption': {'fieldname': 'consumption',
                                   'resolution': 0.88,
                                   'seed': 1,
                                   'name': 'consumption',
                                   'type': 'RandomDistributedScalarEncoder'}}

  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoderParams)
  sensorRegion.encoder = encoder

  # Add a data source.
  testDir = os.path.dirname(os.path.abspath(__file__))
  inputFile = os.path.join(testDir, 'fixtures', 'gymdata-test.csv')
  dataSource = FileRecordStream(streamID=inputFile)
  sensorRegion.dataSource = dataSource

  # Get and set what field index we want to predict.
  network.regions['sensor'].setParameter('predictedField', 'consumption')

  return network
Example #5
def createEncoder(newEncoders):
  """
  Creates and returns a MultiEncoder.

  @param newEncoders    (dict)          Keys are the encoders' names, values are
      dicts of the params; an example is shown below.

  @return encoder       (MultiEncoder)  See nupic.encoders.multi.py.

  Example input:
    {"energy": {"fieldname": u"energy",
                "type": "ScalarEncoder",
                "name": u"consumption",
                "minval": 0.0,
                "maxval": 100.0,
                "w": 21,
                "n": 500},
     "timestamp": {"fieldname": u"timestamp",
                   "type": "DateEncoder",
                   "name": u"timestamp_timeOfDay",
                   "timeOfDay": (21, 9.5)},
    }
  """
  encoder = MultiEncoder()
  encoder.addMultipleEncoders(newEncoders)
  return encoder
Example #6
def createEncoder(newEncoders):
  """
  Creates and returns a MultiEncoder.

  @param newEncoders    (dict)          Keys are the encoders' names, values are
      dicts of the params; an example is shown below.

  @return encoder       (MultiEncoder)  See nupic.encoders.multi.py.

  Example input:
    {"energy": {"fieldname": u"energy",
                "type": "ScalarEncoder",
                "name": u"consumption",
                "minval": 0.0,
                "maxval": 100.0,
                "w": 21,
                "n": 500},
     "timestamp": {"fieldname": u"timestamp",
                   "type": "DateEncoder",
                   "name": u"timestamp_timeOfDay",
                   "timeOfDay": (21, 9.5)},
    }
  """
  encoder = MultiEncoder()
  encoder.addMultipleEncoders(newEncoders)
  return encoder
Example #7
    def _addRegion(self, src_name, dest_name, params):

        sensor     =  src_name
        sp_name    = "sp_" + dest_name
        tp_name    = "tp_" + dest_name
        class_name = "class_" + dest_name

        try:
            # If the regions already exist, reuse them and just link the sensor.
            self.network.regions[sp_name]
            self.network.regions[tp_name]
            self.network.regions[class_name]
            self.network.link(sensor, sp_name, "UniformLink", "")

        except Exception:
            # Regions missing -- create and link the sp -> tp -> classifier chain.
            # sp
            self.network.addRegion(sp_name, "py.SPRegion", json.dumps(params['SP_PARAMS']))
            self.network.link(sensor, sp_name, "UniformLink", "")

            # tp
            self.network.addRegion(tp_name, "py.TPRegion", json.dumps(params['TP_PARAMS']))
            self.network.link(sp_name, tp_name, "UniformLink", "")

            # class
            self.network.addRegion( class_name, "py.CLAClassifierRegion", json.dumps(params['CLASSIFIER_PARAMS']))
            self.network.link(tp_name, class_name, "UniformLink", "")

            encoder = MultiEncoder()
            encoder.addMultipleEncoders(params['CLASSIFIER_ENCODE_PARAMS'])
            self.classifier_encoder_list[class_name]  = encoder
            self.classifier_input_list[class_name]    = tp_name
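For reference (not in the original source), the region chain the except branch builds, using the naming scheme above:

#   <src_name>      --UniformLink-->  sp_<dest_name>    (py.SPRegion)
#   sp_<dest_name>  --UniformLink-->  tp_<dest_name>    (py.TPRegion)
#   tp_<dest_name>  --UniformLink-->  class_<dest_name> (py.CLAClassifierRegion)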
Example #8
def _createEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        'timestamp':
        dict(fieldname='timestamp',
             type='DateEncoder',
             timeOfDay=(5, 5),
             forced=True),
        'attendeeCount':
        dict(fieldname='attendeeCount',
             type='ScalarEncoder',
             name='attendeeCount',
             minval=0,
             maxval=270,
             clipInput=True,
             w=5,
             resolution=10,
             forced=True),
        'consumption':
        dict(fieldname='consumption',
             type='ScalarEncoder',
             name='consumption',
             minval=0,
             maxval=115,
             clipInput=True,
             w=5,
             resolution=5,
             forced=True),
    })

    return encoder
Example #9
def _createEncoder(encoders):
    """
    Creates and returns a MultiEncoder.

    @param encoders: (dict) Keys are the encoders' names, values are dicts of
    the params; an example is shown below.
    @return encoder: (MultiEncoder) See nupic.encoders.multi.py. Example input:
      {"energy": {"fieldname": u"energy",
                  "type": "ScalarEncoder",
                  "name": u"consumption",
                  "minval": 0.0,
                  "maxval": 100.0,
                  "w": 21,
                  "n": 500},
       "timestamp": {"fieldname": u"timestamp",
                     "type": "DateEncoder",
                     "name": u"timestamp_timeOfDay",
                     "timeOfDay": (21, 9.5)},
      }
    """
    if not isinstance(encoders, dict):
        raise TypeError("Encoders specified in incorrect format.")

    encoder = MultiEncoder()
    encoder.addMultipleEncoders(encoders)

    return encoder
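A hypothetical call using the docstring's example input; any non-dict argument trips the TypeError guard above.

encoder = _createEncoder({"energy": {"fieldname": u"energy",
                                     "type": "ScalarEncoder",
                                     "name": u"consumption",
                                     "minval": 0.0,
                                     "maxval": 100.0,
                                     "w": 21,
                                     "n": 500}})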
Example #10
def _createEncoder(encoders):
  """
  Creates and returns a MultiEncoder.

  @param encoders: (dict) Keys are the encoders' names, values are dicts of
  the params; an example is shown below.
  @return encoder: (MultiEncoder) See nupic.encoders.multi.py. Example input:
    {"energy": {"fieldname": u"energy",
                "type": "ScalarEncoder",
                "name": u"consumption",
                "minval": 0.0,
                "maxval": 100.0,
                "w": 21,
                "n": 500},
     "timestamp": {"fieldname": u"timestamp",
                   "type": "DateEncoder",
                   "name": u"timestamp_timeOfDay",
                   "timeOfDay": (21, 9.5)},
    }
  """
  if not isinstance(encoders, dict):
    raise TypeError("Encoders specified in incorrect format.")

  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoders)

  return encoder
Example #11
def _createEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders(
        {
            "timestamp": dict(fieldname="timestamp", type="DateEncoder", timeOfDay=(5, 5)),
            "attendeeCount": dict(
                fieldname="attendeeCount",
                type="ScalarEncoder",
                name="attendeeCount",
                minval=0,
                maxval=270,
                clipInput=True,
                w=5,
                resolution=10,
            ),
            "consumption": dict(
                fieldname="consumption",
                type="ScalarEncoder",
                name="consumption",
                minval=0,
                maxval=115,
                clipInput=True,
                w=5,
                resolution=5,
            ),
        }
    )

    return encoder
Example #12
def _createNetwork():
  """Create a network with a RecordSensor region and an SDRClassifier region."""

  network = Network()
  network.addRegion('sensor', 'py.RecordSensor', '{}')
  network.addRegion('classifier', 'py.SDRClassifierRegion', '{}')
  _createSensorToClassifierLinks(network, 'sensor', 'classifier')

  # Add encoder to sensor region.
  sensorRegion = network.regions['sensor'].getSelf()
  encoderParams = {'consumption': {'fieldname': 'consumption',
                                   'resolution': 0.88,
                                   'seed': 1,
                                   'name': 'consumption',
                                   'type': 'RandomDistributedScalarEncoder'}}

  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoderParams)
  sensorRegion.encoder = encoder

  # Add data source.
  testDir = os.path.dirname(os.path.abspath(__file__))
  inputFile = os.path.join(testDir, 'fixtures', 'gymdata-test.csv')
  dataSource = FileRecordStream(streamID=inputFile)
  sensorRegion.dataSource = dataSource

  # Get and set what field index we want to predict.
  predictedIdx = dataSource.getFieldNames().index('consumption')
  network.regions['sensor'].setParameter('predictedFieldIdx', predictedIdx)

  return network
Example #13
def _createNetwork():
    """Create network with one RecordSensor region."""
    network = Network()
    network.addRegion('sensor', 'py.RecordSensor', '{}')
    sensorRegion = network.regions['sensor'].getSelf()

    # Add an encoder.
    encoderParams = {
        'consumption': {
            'fieldname': 'consumption',
            'resolution': 0.88,
            'seed': 1,
            'name': 'consumption',
            'type': 'RandomDistributedScalarEncoder'
        }
    }

    encoder = MultiEncoder()
    encoder.addMultipleEncoders(encoderParams)
    sensorRegion.encoder = encoder

    # Add a data source.
    testDir = os.path.dirname(os.path.abspath(__file__))
    inputFile = os.path.join(testDir, 'fixtures', 'gymdata-test.csv')
    dataSource = FileRecordStream(streamID=inputFile)
    sensorRegion.dataSource = dataSource

    # Get and set what field index we want to predict.
    predictedIdx = dataSource.getFieldNames().index('consumption')
    network.regions['sensor'].setParameter('predictedFieldIdx', predictedIdx)

    return network
Example #14
    def initialize(self):
        """
        Initialize this node.
        """
        Node.initialize(self)

        # Initialize input bits
        self.bits = []
        for x in range(self.width):
            for y in range(self.height):
                bit = Bit()
                bit.x = x
                bit.y = y
                self.bits.append(bit)

        if self.data_source_type == DataSourceType.FILE:
            """
            Open the file and place the cursor on the first record.
            """

            # If file name provided is a relative path, use project file path
            if self.file_name != '' and os.path.dirname(self.file_name) == '':
                full_file_name = os.path.dirname(Global.project.file_name) + '/' + self.file_name
            else:
                full_file_name = self.file_name

            # Check if file really exists
            if not os.path.isfile(full_file_name):
                QtWidgets.QMessageBox.warning(None, "Warning", "Input stream file '" + full_file_name + "' was not found or specified.", QtWidgets.QMessageBox.Ok)
                return

            # Create a data source to read the file
            self.data_source = FileRecordStream(full_file_name)

        elif self.data_source_type == DataSourceType.DATABASE:
            pass

        self.encoder = MultiEncoder()
        for encoding in self.encodings:
            encoding.initialize()

            # Instantiate the encoder class given its module, class name, and constructor params
            encoding.encoder = getInstantiatedClass(encoding.encoder_module, encoding.encoder_class, encoding.encoder_params)

            # Take the first part of encoder field name as encoder name
            # Ex: timestamp_weekend.weekend => timestamp_weekend
            encoding.encoder.name = encoding.encoder_field_name.split('.')[0]

            # Add sub-encoder to multi-encoder list
            self.encoder.addEncoder(encoding.data_source_field_name, encoding.encoder)

        # If the encoder size exceeds the sensor size, warn and abort
        encoder_size = self.encoder.getWidth()
        sensor_size = self.width * self.height
        if encoder_size > sensor_size:
            QtWidgets.QMessageBox.warning(None, "Warning", "'" + self.name + "': Encoder size (" + str(encoder_size) + ") is different from sensor size (" + str(self.width) + " x " + str(self.height) + " = " + str(sensor_size) + ").", QtWidgets.QMessageBox.Ok)
            return

        return True
Example #15
def createEncoder(rdse_resolution):
    """Create the encoder instance for our test and return it."""
    series_rdse = RandomDistributedScalarEncoder(
        rdse_resolution,
        name="rdse with resolution {}".format(rdse_resolution))
    encoder = MultiEncoder()
    encoder.addEncoder("series", series_rdse)
    return encoder
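Hypothetical usage: because the RDSE was registered under the field name "series", the returned MultiEncoder expects a dict with that key.

encoder = createEncoder(rdse_resolution=0.88)
sdr = encoder.encode({"series": 21.0})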
Example #16
def createEncoder(encoderParams):
    '''
    Create a multi-encoder from params.
    '''

    encoder = MultiEncoder()
    encoder.addMultipleEncoders(encoderParams)
    return encoder
Example #17
    def __init__(self, resolution=.5):
        """Create the encoder instance for our test and return it."""
        self.resolution = resolution
        self.series_encoder = RandomDistributedScalarEncoder(
            self.resolution, name="RDSE-(res={})".format(self.resolution))
        self.encoder = MultiEncoder()
        self.encoder.addEncoder("series", self.series_encoder)
        self.last_m_encode = numpy.zeros(1)
Example #18
def createEncoder():
    """Create the encoder instance for our test and return it."""
    consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption", clipInput=True)
    time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")

    encoder = MultiEncoder()
    encoder.addEncoder("consumption", consumption_encoder)
    encoder.addEncoder("timestamp", time_encoder)

    return encoder
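A hypothetical round trip with the encoder above: encode one record, then decode the bit array back into per-field value ranges (the same decode() reconstruction used by the Sensor class in a later example).

import datetime

encoder = createEncoder()
bits = encoder.encode({"consumption": 55.3,
                       "timestamp": datetime.datetime(2014, 5, 2, 13, 0)})
fieldsDict, fieldsOrder = encoder.decode(bits)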
Example #19
def createSensors(network, sensors):
    for sensor in sensors:
        dataSource = FileRecordStream(streamID=sensor["source"])
        dataSource.setAutoRewind(True)
        encoder = MultiEncoder()
        encoder.addMultipleEncoders(fieldEncodings=sensor["encodings"])
        s = createRegion(network, sensor)
        s = s.getSelf()
        s.dataSource = dataSource
        s.encoder = encoder
    return network
Example #20
def createCategoryEncoder():
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        "gym": {
            "type": "CategoryEncoder",
            "fieldname": u"gym",
            "name": u"gym",
            "categoryList": ['Hornsby', 'Melbourne', 'Epping', 'Chadstone', 'North', 'Bondi', 'Pitt', 'Park', 'Canberra', 'Darlinghurst'],
            "w": 21,
            },
        })
    return encoder
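Hypothetical usage: a CategoryEncoder field is fed the category string itself; strings outside categoryList fall into the encoder's unknown bucket.

encoder = createCategoryEncoder()
sdr = encoder.encode({"gym": "Melbourne"})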
Example #21
def createClassifierEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        "y": {
            "type": "CategoryEncoder",
            "categoryList": ['label_1', 'label_2'],
            "fieldname": u"y",
            "name": u"y",
            "w": 21,
        },
    })

    return encoder
Example #22
def createClassifierEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
            "y": {
                    "type": "CategoryEncoder",
                    "categoryList": ['label_1', 'label_2'],
                    "fieldname": u"y",
                    "name": u"y",
                    "w": 21,
            },
    })

    return encoder
Example #23
def _createEncoder():
  """Create the encoder instance for our test and return it."""
  encoder = MultiEncoder()
  encoder.addMultipleEncoders({
      'timestamp': dict(fieldname='timestamp', type='DateEncoder',
                        timeOfDay=(5,5), forced=True),
      'attendeeCount': dict(fieldname='attendeeCount', type='ScalarEncoder',
                            name='attendeeCount', minval=0, maxval=270,
                            clipInput=True, w=5, resolution=10, forced=True),
      'consumption': dict(fieldname='consumption',type='ScalarEncoder',
                          name='consumption', minval=0,maxval=115,
                          clipInput=True, w=5, resolution=5, forced=True),
  })

  return encoder
Example #24
def createScalarEncoder():
  scalarEncoder = ScalarEncoder(SCALAR_ENCODER_PARAMS['w'], 
                       SCALAR_ENCODER_PARAMS['minval'], 
                       SCALAR_ENCODER_PARAMS['maxval'], 
                       n=SCALAR_ENCODER_PARAMS['n'], 
                       name=SCALAR_ENCODER_PARAMS['name'])
  
  # NOTE: we don't want to encode the category along with the scalar input. 
  # The category will be fed separately to the classifier later, during the training phase.
  #categoryEncoder = CategoryEncoder(CATEGORY_ENCODER_PARAMS['w'],
  #                                  CATEGORY_ENCODER_PARAMS['categoryList'],
  #                                  name=CATEGORY_ENCODER_PARAMS['name'])
  encoder = MultiEncoder()
  encoder.addEncoder(SCALAR_ENCODER_PARAMS['name'], scalarEncoder)
  
  return encoder
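A hypothetical call, assuming SCALAR_ENCODER_PARAMS is defined elsewhere as something like {'name': 'value', 'w': 21, 'minval': 0.0, 'maxval': 100.0, 'n': 500}.

encoder = createScalarEncoder()
sdr = encoder.encode({SCALAR_ENCODER_PARAMS['name']: 42.0})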
Example #25
  def initialize(self):
    """
    Initialize this node.
    """

    Node.initialize(self)

    # Initialize input bits
    self.bits = []
    for x in range(self.width):
      for y in range(self.height):
        bit = Bit()
        bit.x = x
        bit.y = y
        self.bits.append(bit)

    if self.dataSourceType == DataSourceType.file:
      """
      Open the file and place the cursor on the first record.
      """

      # If file name provided is a relative path, use project file path
      if self.fileName != '' and os.path.dirname(self.fileName) == '':
        fullFileName = os.path.dirname(Global.project.fileName) + '/' + self.fileName
      else:
        fullFileName = self.fileName

      # Check if file really exists
      if not os.path.isfile(fullFileName):
        QtGui.QMessageBox.warning(None, "Warning", "Input stream file '" + fullFileName + "' was not found or specified.", QtGui.QMessageBox.Ok)
        return

      # Create a data source to read the file
      self.dataSource = FileRecordStream(fullFileName)

    elif self.dataSourceType == DataSourceType.database:
      pass

    self.encoder = MultiEncoder()
    for encoding in self.encodings:
      encoding.initialize()

      # Instantiate the encoder class given its module, class name, and constructor params
      encoding.encoder = getInstantiatedClass(encoding.encoderModule, encoding.encoderClass, encoding.encoderParams)

      # Take the first part of encoder field name as encoder name
      # Ex: timestamp_weekend.weekend => timestamp_weekend
      encoding.encoder.name = encoding.encoderFieldName.split('.')[0]

      # Add sub-encoder to multi-encoder list
      self.encoder.addEncoder(encoding.dataSourceFieldName, encoding.encoder)

    # If the encoder size exceeds the sensor size, warn and abort
    encoderSize = self.encoder.getWidth()
    sensorSize = self.width * self.height
    if encoderSize > sensorSize:
      QtGui.QMessageBox.warning(None, "Warning", "'" + self.name + "': Encoder size (" + str(encoderSize) + ") is different from sensor size (" + str(self.width) + " x " + str(self.height) + " = " + str(sensorSize) + ").", QtGui.QMessageBox.Ok)
      return

    return True
Example #26
def createSensorEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        "x": {
            "type": "ScalarEncoder",
            "fieldname": u"x",
            "name": u"x",
            "maxval": 100.0,
            "minval": 0.0,
            "n": 100,
            "w": 21,
            "clipInput": True,
        },
    })

    return encoder
Example #27
def createSensorEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
            "x": {
                    "type": "ScalarEncoder",
                    "fieldname": u"x",
                    "name": u"x",
                    "maxval": 100.0,
                    "minval": 0.0,
                    "n": 100,
                    "w": 21,
                    "clipInput": True,
            },
    })

    return encoder
Example #28
def createEncoder():
    """Create the encoder instance for our test and return it."""
    #consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption",
    #     clipInput=True)
    consumption_encoder = ScalarEncoder(21,
                                        -50.0,
                                        300.0,
                                        n=50,
                                        name="consumption",
                                        clipInput=True)

    #time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")

    encoder = MultiEncoder()
    encoder.addEncoder("consumption", consumption_encoder)
    #encoder.addEncoder("timestamp", time_encoder)

    return encoder
Example #29
    def _makeRegion(self, name, params):
        sp_name    = "sp_" + name
        if self.tp_enable:
            tp_name    = "tp_" + name
        class_name = "class_" + name

        # addRegion
        self.network.addRegion(sp_name, "py.SPRegion", json.dumps(params['SP_PARAMS']))
        if self.tp_enable:
            self.network.addRegion(tp_name, "py.TPRegion", json.dumps(params['TP_PARAMS']))
        self.network.addRegion( class_name, "py.CLAClassifierRegion", json.dumps(params['CLASSIFIER_PARAMS']))

        encoder = MultiEncoder()
        encoder.addMultipleEncoders(self.class_encoder_params)
        self.classifier_encoder_list[class_name]  = encoder
        if self.tp_enable:
            self.classifier_input_list[class_name]    = tp_name
        else:
            self.classifier_input_list[class_name]    = sp_name
Example #30
    def _makeRegion(self, name, params):
        sp_name    = "sp_" + name
        if self.tp_enable:
            tp_name    = "tp_" + name
        class_name = "class_" + name

        # addRegion
        self.network.addRegion(sp_name, "py.SPRegion", json.dumps(params['SP_PARAMS']))
        if self.tp_enable:
            self.network.addRegion(tp_name, "py.TPRegion", json.dumps(params['TP_PARAMS']))
        self.network.addRegion( class_name, "py.CLAClassifierRegion", json.dumps(params['CLASSIFIER_PARAMS']))

        encoder = MultiEncoder()
        encoder.addMultipleEncoders(self.class_encoder_params)
        self.classifier_encoder_list[class_name]  = encoder
        if self.tp_enable:
            self.classifier_input_list[class_name]    = tp_name
        else:
            self.classifier_input_list[class_name]    = sp_name
Example #31
def createEncoder():
    consumptionEncoder = ScalarEncoder(21, 0, 1024, n=50, name="consumption")
    timeEncoder = DateEncoder(timeOfDay=(21,9.5), name="timestamp_timeOfDay")
    encoder = MultiEncoder()
    encoder.addEncoder("consumption", consumptionEncoder)
    encoder.addEncoder("timestamp", timeEncoder)
    return encoder
Example #32
def createEncoder():
    # TODO: vector
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        "consumption": {
            "clipInput": True,
            "type": "ScalarEncoder",
            "fieldname": u"consumption",
            "name": u"consumption",
            "maxval": 100.0,
            "minval": 0.0,
            "n": 50,
            "w": 21,
            },
        "timestamp_timeOfDay": {
            "fieldname": u"timestamp",
            "name": u"timestamp_timeOfDay",
            "timeOfDay": (21, 9.5),
            "type": "DateEncoder",
            },
        })
    return encoder
Example #33
def createEncoder():
    #volume_encoder = ScalarEncoder(7, 0.0, 70.0, n=200, name="volume", clipInput=False, forced=True)
    #floorheight_encoder = ScalarEncoder(1, 0.0, 70.0, n=25, name="floorheight", clipInput=False, forced=True)

    diagCoorA_encoder = ScalarEncoder(257,
                                      0.0,
                                      200.0,
                                      n=2048,
                                      name="diagCoorA",
                                      clipInput=False,
                                      forced=True)
    #diagCoorB_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorB", clipInput=False, forced=True)
    #diagCoorC_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorC", clipInput=False, forced=True)
    #diagCoorD_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorD", clipInput=False, forced=True)
    #diagCoorE_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorE", clipInput=False, forced=True)
    #diagCoorF_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorF", clipInput=False, forced=True)
    #diagCoorG_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorG", clipInput=False, forced=True)
    #diagCoorH_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorH", clipInput=False, forced=True)
    #diagCoorI_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorI", clipInput=False, forced=True)
    #diagCoorJ_encoder = ScalarEncoder(157, 0.0, 200.0, n=2000, name="diagCoorJ", clipInput=False, forced=True)

    global encoder
    encoder = MultiEncoder()

    #encoder.addEncoder("volume", volume_encoder)
    #encoder.addEncoder("floorheight", floorheight_encoder)
    encoder.addEncoder("diagCoorA", diagCoorA_encoder)
    #encoder.addEncoder("diagCoorB", diagCoorB_encoder)
    #encoder.addEncoder("diagCoorC", diagCoorC_encoder)
    #encoder.addEncoder("diagCoorD", diagCoorD_encoder)
    #encoder.addEncoder("diagCoorE", diagCoorE_encoder)
    #encoder.addEncoder("diagCoorF", diagCoorF_encoder)
    #encoder.addEncoder("diagCoorG", diagCoorG_encoder)
    #encoder.addEncoder("diagCoorH", diagCoorH_encoder)
    #encoder.addEncoder("diagCoorI", diagCoorI_encoder)
    #encoder.addEncoder("diagCoorJ", diagCoorJ_encoder)

    return encoder
Example #34
def createEncoder():
    """Create the encoder instance for our test and return it."""
    encoder = MultiEncoder()
    encoder.addMultipleEncoders({
        "consumption": {
            "clipInput": True,
            "fieldname": u"consumption",
            "maxval": 100.0,
            "minval": 0.0,
            "n": 50,
            "name": u"consumption",
            "type": "ScalarEncoder",
            "w": 21,
        },
        "timestamp_timeOfDay": {
            "fieldname": u"timestamp",
            "name": u"timestamp_timeOfDay",
            "timeOfDay": (21, 9.5),
            "type": "DateEncoder",
        },
    })

    return encoder
Example #35
def createEncoder():
  """Create the encoder instance for our test and return it."""
  encoder = MultiEncoder()
  encoder.addMultipleEncoders({
      "consumption": {
          "clipInput": True,
          "fieldname": u"consumption",
          "maxval": 100.0,
          "minval": 0.0,
          "n": 50,
          "name": u"consumption",
          "type": "ScalarEncoder",
          "w": 21,
      },
      "timestamp_timeOfDay": {
          "fieldname": u"timestamp",
          "name": u"timestamp_timeOfDay",
          "timeOfDay": (21, 9.5),
          "type": "DateEncoder",
      },
  })

  return encoder
Example #36
def createEncoder():
  """
  Creates and returns a #MultiEncoder including a ScalarEncoder for
  energy consumption and a DateEncoder for the time of day.

  @see nupic/encoders/__init__.py for type to file-name mapping
  @see nupic/encoders for encoder source files
  """
  encoder = MultiEncoder()
  encoder.addMultipleEncoders({
      "consumption": {"fieldname": u"consumption",
                      "type": "ScalarEncoder",
                      "name": u"consumption",
                      "minval": 0.0,
                      "maxval": 100.0,
                      "clipInput": True,
                      "w": 21,
                      "n": 500},
      "timestamp_timeOfDay": {"fieldname": u"timestamp",
                              "type": "DateEncoder",
                              "name": u"timestamp_timeOfDay",
                              "timeOfDay": (21, 9.5)}
  })
  return encoder
Example #37
def createEncoder():

    diagCoorA_encoder = ScalarEncoder(105,
                                      0.0,
                                      200.0,
                                      n=1324,
                                      name="diagCoorA",
                                      clipInput=False,
                                      forced=True)
    diagCoorB_encoder = ScalarEncoder(105,
                                      0.0,
                                      200.0,
                                      n=1324,
                                      name="diagCoorB",
                                      clipInput=False,
                                      forced=True)

    global encoder
    encoder = MultiEncoder()

    encoder.addEncoder("diagCoorA", diagCoorA_encoder)
    encoder.addEncoder("diagCoorB", diagCoorB_encoder)

    return encoder
Example #38
def createEncoder():

    diagCoorA_encoder = ScalarEncoder(55, 0.0, 200.0, n=200, name="diagCoorA")
    diagCoorB_encoder = ScalarEncoder(55, 0.0, 200.0, n=200, name="diagCoorB")
    diagCoorC_encoder = ScalarEncoder(55, 0.0, 200.0, n=200, name="diagCoorC")

    global encoder
    encoder = MultiEncoder()

    encoder.addEncoder("diagCoorA", diagCoorA_encoder)
    encoder.addEncoder("diagCoorB", diagCoorB_encoder)
    encoder.addEncoder("diagCoorC", diagCoorC_encoder)

    return encoder
Example #39
def createEncoder(multilevelAnomaly=False):
    encoder = MultiEncoder()
    if not multilevelAnomaly:
        encoder.addMultipleEncoders({
            "cpu": {
                "fieldname": u"cpu",
                "type": "ScalarEncoder",
                "name": u"cpu",
                "minval": 0.0,
                "maxval": 100.0,
                "clipInput": True,
                "w": 21,
                "n": 500
            }
        })
    else:
        encoder.addMultipleEncoders({
            "cpu": {
                "fieldname": u"cpu",
                "type": "ScalarEncoder",
                "name": u"cpu",
                "minval": 0.0,
                "maxval": 100.0,
                "clipInput": True,
                "w": 21,
                "n": 500
            },
            "mem": {
                "fieldname": u"mem",
                "type": "ScalarEncoder",
                "name": u"mem",
                "minval": 0.0,
                "maxval": 100.0,
                "clipInput": True,
                "w": 21,
                "n": 500
            }
        })

    return encoder
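Hypothetical usage of the two modes above: the default encodes only "cpu", while multilevelAnomaly=True expects records carrying both fields.

encoder = createEncoder(multilevelAnomaly=True)
sdr = encoder.encode({"cpu": 73.0, "mem": 41.5})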
Example #40
class Sensor(Node):
  """
  A super class used only to group properties related to sensors.
  """

  #region Constructor

  def __init__(self, name):
    """
    Initializes a new instance of this class.
    """

    Node.__init__(self, name, NodeType.sensor)

    #region Instance fields

    self.bits = []
    """An array of the bit objects that compose the current output of this node."""

    self.dataSource = None
    """Data source which provides records to fed into a region."""

    self.dataSourceType = DataSourceType.file
    """Type of the data source (File or Database)"""

    self.fileName = ''
    """The input file name to be handled. Returns the input file name only if it is in the project directory, full path otherwise."""

    self.databaseConnectionString = ""
    """Connection string of the database."""

    self.databaseTable = ''
    """Target table of the database."""

    self.encoder = None
    """Multi-encoder which concatenate sub-encodings to convert raw data to htm input and vice-versa."""

    self.encodings = []
    """List of sub-encodings that handles the input from database"""

    self.predictionsMethod = PredictionsMethod.reconstruction
    """Method used to get predicted values and their probabilities."""

    self.enableClassificationLearning = True
    """Switch for classification learning"""

    self.enableClassificationInference = True
    """Switch for classification inference"""

    #endregion

    #region Statistics properties

    self.statsPrecisionRate = 0.

    #endregion

  #endregion

  #region Methods

  def getBit(self, x, y):
    """
    Return the bit located at the given position.
    """

    bit = self.bits[(y * self.width) + x]

    return bit

  def initialize(self):
    """
    Initialize this node.
    """

    Node.initialize(self)

    # Initialize input bits
    self.bits = []
    for x in range(self.width):
      for y in range(self.height):
        bit = Bit()
        bit.x = x
        bit.y = y
        self.bits.append(bit)

    if self.dataSourceType == DataSourceType.file:
      """
      Open the file and place the cursor on the first record.
      """

      # If file name provided is a relative path, use project file path
      if self.fileName != '' and os.path.dirname(self.fileName) == '':
        fullFileName = os.path.dirname(Global.project.fileName) + '/' + self.fileName
      else:
        fullFileName = self.fileName

      # Check if file really exists
      if not os.path.isfile(fullFileName):
        QtGui.QMessageBox.warning(None, "Warning", "Input stream file '" + fullFileName + "' was not found or specified.", QtGui.QMessageBox.Ok)
        return

      # Create a data source to read the file
      self.dataSource = FileRecordStream(fullFileName)

    elif self.dataSourceType == DataSourceType.database:
      pass

    self.encoder = MultiEncoder()
    for encoding in self.encodings:
      encoding.initialize()

      # Instantiate the encoder class given its module, class name, and constructor params
      encoding.encoder = getInstantiatedClass(encoding.encoderModule, encoding.encoderClass, encoding.encoderParams)

      # Take the first part of encoder field name as encoder name
      # Ex: timestamp_weekend.weekend => timestamp_weekend
      encoding.encoder.name = encoding.encoderFieldName.split('.')[0]

      # Add sub-encoder to multi-encoder list
      self.encoder.addEncoder(encoding.dataSourceFieldName, encoding.encoder)

    # If the encoder size exceeds the sensor size, warn and abort
    encoderSize = self.encoder.getWidth()
    sensorSize = self.width * self.height
    if encoderSize > sensorSize:
      QtGui.QMessageBox.warning(None, "Warning", "'" + self.name + "': Encoder size (" + str(encoderSize) + ") is different from sensor size (" + str(self.width) + " x " + str(self.height) + " = " + str(sensorSize) + ").", QtGui.QMessageBox.Ok)
      return

    return True

  def nextStep(self):
    """
    Performs actions related to time step progression.
    """

    # Update the state machine by removing the first element and appending a new one at the end
    for encoding in self.encodings:
      encoding.currentValue.rotate()
      if encoding.enableInference:
        encoding.predictedValues.rotate()
        encoding.bestPredictedValue.rotate()

    Node.nextStep(self)
    for bit in self.bits:
      bit.nextStep()

    # Get record value from data source
    # If the last record was reached just rewind it
    data = self.dataSource.getNextRecordDict()
    if not data:
      self.dataSource.rewind()
      data = self.dataSource.getNextRecordDict()

    # Pass raw values to encoder and get a concatenated array
    outputArray = numpy.zeros(self.encoder.getWidth())
    self.encoder.encodeIntoArray(data, outputArray)

    # Get values obtained from the data source.
    outputValues = self.encoder.getScalars(data)

    # Get raw values and respective encoded bit array for each field
    prevOffset = 0
    for i in range(len(self.encodings)):
      encoding = self.encodings[i]

      # Convert the value to its respective data type
      currValue = outputValues[i]
      if encoding.encoderFieldDataType == FieldDataType.boolean:
        currValue = bool(currValue)
      elif encoding.encoderFieldDataType == FieldDataType.integer:
        currValue = int(currValue)
      elif encoding.encoderFieldDataType == FieldDataType.decimal:
        currValue = float(currValue)
      elif encoding.encoderFieldDataType == FieldDataType.dateTime:
        currValue = dateutil.parser.parse(str(currValue))
      elif encoding.encoderFieldDataType == FieldDataType.string:
        currValue = str(currValue)
      encoding.currentValue.setForCurrStep(currValue)

    # Update sensor bits
    for i in range(len(outputArray)):
      if outputArray[i] > 0.:
        self.bits[i].isActive.setForCurrStep(True)
      else:
        self.bits[i].isActive.setForCurrStep(False)

    # Mark falsely predicted bits
    for bit in self.bits:
      if bit.isPredicted.atPreviousStep() and not bit.isActive.atCurrStep():
        bit.isFalselyPredicted.setForCurrStep(True)

    self._output = outputArray

  def getPredictions(self):
    """
    Get the predictions after an iteration.
    """

    if self.predictionsMethod == PredictionsMethod.reconstruction:

      # Prepare the prediction array to be decoded
      # This array holds a 1 for each bit that is predicted, 0 otherwise
      output = []
      for i in range(len(self.bits)):
        if self.bits[i].isPredicted.atCurrStep():
          output.append(1)
        else:
          output.append(0)
      output = numpy.array(output)

      # Decode output and create predictions list
      fieldsDict, fieldsOrder = self.encoder.decode(output)
      for encoding in self.encodings:
        if encoding.enableInference:
          predictions = []
          encoding.predictedValues.setForCurrStep(dict())

          # If the encoder field name was returned by decode(), assign the predictions to it
          if encoding.encoderFieldName in fieldsOrder:
            predictedLabels = fieldsDict[encoding.encoderFieldName][1].split(', ')
            predictedValues = fieldsDict[encoding.encoderFieldName][0]
            for i in range(len(predictedLabels)):
              predictions.append([predictedValues[i], predictedLabels[i]])

          encoding.predictedValues.atCurrStep()[1] = predictions

          # Get the predicted value with the highest probability
          if len(predictions) > 0:
            bestPredictionRange = predictions[0][0]
            min = bestPredictionRange[0]
            max = bestPredictionRange[1]
            bestPredictedValue = (min + max) / 2.0
            encoding.bestPredictedValue.setForCurrStep(bestPredictedValue)

    elif self.predictionsMethod == PredictionsMethod.classification:
      # Classification estimates which values are likely to occur in the next time step.

      offset = 0
      for encoding in self.encodings:
        encoderWidth = encoding.encoder.getWidth()

        if encoding.enableInference:
          # Prepare list with predictions to be classified
          # This list contains the indexes of all bits that are predicted
          patternNZ = []
          for i in range(offset, encoderWidth):
            if self.bits[i].isActive.atCurrStep():
              patternNZ.append(i)

          # Get the bucket index of the current value at the encoder
          actualValue = encoding.currentValue.atCurrStep()
          bucketIdx = encoding.encoder.getBucketIndices(actualValue)[0]

          # Perform classification
          clasResults = encoding.classifier.compute(recordNum=Global.currStep, patternNZ=patternNZ, classification={'bucketIdx': bucketIdx, 'actValue': actualValue}, learn=self.enableClassificationLearning, infer=self.enableClassificationInference)

          encoding.predictedValues.setForCurrStep(dict())
          for step in encoding.steps:

            # Calculate probability for each predicted value
            predictions = dict()
            for (actValue, prob) in zip(clasResults['actualValues'], clasResults[step]):
              if actValue in predictions:
                predictions[actValue] += prob
              else:
                predictions[actValue] = prob

            # Remove predictions with low probabilities
            maxVal = (None, None)
            for (actValue, prob) in predictions.items():
              if len(predictions) <= 1:
                break
              if maxVal[0] is None or prob >= maxVal[1]:
                if maxVal[0] is not None and maxVal[1] < encoding.minProbabilityThreshold:
                  del predictions[maxVal[0]]
                maxVal = (actValue, prob)
              elif prob < encoding.minProbabilityThreshold:
                del predictions[actValue]

            # Sort the values from most to least probable and truncate the
            # list to the max-predictions-per-step limit
            predictions = sorted(predictions.iteritems(), key=operator.itemgetter(1), reverse=True)
            predictions = predictions[:maxFutureSteps]

            encoding.predictedValues.atCurrStep()[step] = predictions

          # Get the predicted value with the highest probability
          bestPredictedValue = encoding.predictedValues.atCurrStep()[1][0][0]
          encoding.bestPredictedValue.setForCurrStep(bestPredictedValue)

        offset += encoderWidth

  def calculateStatistics(self):
    """
    Calculate statistics after an iteration.
    """

    if Global.currStep > 0:
      precision = 0.

      # Calculate the prediction precision by checking whether the current value falls within the range of any prediction.
      for encoding in self.encodings:
        if encoding.enableInference:
          predictions = encoding.predictedValues.atPreviousStep()[1]
          for predictedValue in predictions:
            min = None
            max = None
            value = predictedValue[0]
            if self.predictionsMethod == PredictionsMethod.reconstruction:
              min = value[0]
              max = value[1]
            elif self.predictionsMethod == PredictionsMethod.classification:
              min = value
              max = value
            if isinstance(min, (int, long, float, complex)) and isinstance(max, (int, long, float, complex)):
              min = math.floor(min)
              max = math.ceil(max)
            if min <= encoding.currentValue.atCurrStep() <= max:
              precision = 100.
              break

      # The precision rate is the average of the precision calculated in every step
      self.statsPrecisionRate = (self.statsPrecisionRate + precision) / 2
    else:
      self.statsPrecisionRate = 0.

    for bit in self.bits:
      bit.calculateStatistics()
Example #41
class Entity(object):
    def __init__(self, columnCount, InputEncoderParams, toL4ConnectorParamsI,
                 toL4ConnectorParamsII, toL5ConnectorParams,
                 toD1ConnectorParams, toD2ConnectorParams, L4Params, L5Params,
                 k, D1Params, D2Params):
        self.columnCount = columnCount
        self.toL4ConnectorParamsI = toL4ConnectorParamsI
        self.toL4ConnectorParamsII = toL4ConnectorParamsII
        self.toL5ConnectorParams = toL5ConnectorParams
        self.toD1ConnectorParams = toD1ConnectorParams
        self.toD2ConnectorParams = toD2ConnectorParams
        self.L4Params = L4Params
        self.L5Params = L5Params
        self.k = k
        self.D1Params = D1Params
        self.D2Params = D2Params
        self.learning = False

        #encoder
        from nupic.encoders import MultiEncoder
        self.InputEncoder = MultiEncoder()
        self.InputEncoder.addMultipleEncoders(InputEncoderParams)
        print "Encoder Online"

        #spatialPoolers
        from nupic.algorithms.spatial_pooler import SpatialPooler
        self.toL4ConnectorI = SpatialPooler(
            inputDimensions=(toL4ConnectorParamsI["inputDimensions"], ),
            columnDimensions=(columnCount, ),
            potentialPct=toL4ConnectorParamsI["potentialPct"],
            globalInhibition=toL4ConnectorParamsI["globalInhibition"],
            localAreaDensity=toL4ConnectorParamsI["localAreaDensity"],
            numActiveColumnsPerInhArea=toL4ConnectorParamsI[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toL4ConnectorParamsI["synPermInactiveDec"],
            synPermActiveInc=toL4ConnectorParamsI["synPermActiveInc"],
            synPermConnected=toL4ConnectorParamsI["synPermConnected"],
            boostStrength=toL4ConnectorParamsI["boostStrength"],
            seed=toL4ConnectorParamsI["seed"],
            wrapAround=toL4ConnectorParamsI["wrapAround"])  #this part sucks
        self.toL4ConnectorII = SpatialPooler(
            inputDimensions=(columnCount * 3, ),
            columnDimensions=(columnCount, ),
            potentialPct=toL4ConnectorParamsII["potentialPct"],
            globalInhibition=toL4ConnectorParamsII["globalInhibition"],
            localAreaDensity=toL4ConnectorParamsII["localAreaDensity"],
            numActiveColumnsPerInhArea=toL4ConnectorParamsII[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toL4ConnectorParamsII["synPermInactiveDec"],
            synPermActiveInc=toL4ConnectorParamsII["synPermActiveInc"],
            synPermConnected=toL4ConnectorParamsII["synPermConnected"],
            boostStrength=toL4ConnectorParamsII["boostStrength"],
            seed=toL4ConnectorParamsII["seed"],
            wrapAround=toL4ConnectorParamsII["wrapAround"])
        print "toL4Connector Online"
        self.toL5Connector = SpatialPooler(
            inputDimensions=(columnCount, ),
            columnDimensions=(columnCount, ),
            potentialPct=toL5ConnectorParams["potentialPct"],
            globalInhibition=toL5ConnectorParams["globalInhibition"],
            localAreaDensity=toL5ConnectorParams["localAreaDensity"],
            numActiveColumnsPerInhArea=toL5ConnectorParams[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toL5ConnectorParams["synPermInactiveDec"],
            synPermActiveInc=toL5ConnectorParams["synPermActiveInc"],
            synPermConnected=toL5ConnectorParams["synPermConnected"],
            boostStrength=toL5ConnectorParams["boostStrength"],
            seed=toL5ConnectorParams["seed"],
            wrapAround=toL5ConnectorParams["wrapAround"])
        print "toL5Connector Online"
        self.toD1Connector = SpatialPooler(
            inputDimensions=(columnCount, ),
            columnDimensions=(columnCount, ),
            potentialPct=toD1ConnectorParams["potentialPct"],
            globalInhibition=toD1ConnectorParams["globalInhibition"],
            localAreaDensity=toD1ConnectorParams["localAreaDensity"],
            numActiveColumnsPerInhArea=toD1ConnectorParams[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toD1ConnectorParams["synPermInactiveDec"],
            synPermActiveInc=toD1ConnectorParams["synPermActiveInc"],
            synPermConnected=toD1ConnectorParams["synPermConnected"],
            boostStrength=toD1ConnectorParams["boostStrength"],
            seed=toD1ConnectorParams["seed"],
            wrapAround=toD1ConnectorParams["wrapAround"])
        print "toD1Connector Online"
        self.toD2Connector = SpatialPooler(
            inputDimensions=(columnCount, ),
            columnDimensions=(columnCount, ),
            potentialPct=toD2ConnectorParams["potentialPct"],
            globalInhibition=toD2ConnectorParams["globalInhibition"],
            localAreaDensity=toD2ConnectorParams["localAreaDensity"],
            numActiveColumnsPerInhArea=toD2ConnectorParams[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toD2ConnectorParams["synPermInactiveDec"],
            synPermActiveInc=toD2ConnectorParams["synPermActiveInc"],
            synPermConnected=toD2ConnectorParams["synPermConnected"],
            boostStrength=toD2ConnectorParams["boostStrength"],
            seed=toD2ConnectorParams["seed"],
            wrapAround=toD2ConnectorParams["wrapAround"])
        print "toD2Connector Online"

        #HTM Layers
        from nupic.algorithms.temporal_memory import TemporalMemory
        self.L4ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.L4 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
        )
        print "L4 Online"
        self.L5ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.L5 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
        )
        print "L5 Online"
        self.D1ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.D1 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
            initialPermanence=0.21,
            connectedPermanence=0.5,
        )
        print "D1 Online"
        self.D2ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.D2 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
            initialPermanence=0.21,
            connectedPermanence=0.5,
        )
        print "D2 Online"

    def encode_input(self, sine1, sine2, angularSpeed1, angularSpeed2,
                     efferenceCopy):
        return self.InputEncoder.encode({
            "sine1": sine1,
            "sine2": sine2,
            "angularSpeed1": angularSpeed1,
            "angularSpeed2": angularSpeed2,
            "efferenceCopy": efferenceCopy
        })

    def reset(self):
        self.action = 0
        self.L4.reset()
        self.L5.reset()
        self.D1.reset()
        self.D2.reset()

    def mimic(self, observation, action):
        # Mimicking only requires remembering the given obs-act pattern, so the striatum is skipped in this func
        self.learning = True
        self.action = action
        encodedInput = self.encode_input(observation[0], observation[2],
                                         observation[4], observation[5],
                                         str(action))

        self.toL4ConnectorI.compute(encodedInput, self.learning,
                                    self.L4ActiveColumns)
        self.L4.compute(self.L4ActiveColumns, learn=self.learning)
        L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]

        L5Temp = numpy.zeros(self.columnCount, dtype=int)
        for column in L4activeColumnIndices:
            L5Temp[column] = 1
        self.toL5Connector.compute(L5Temp, self.learning, self.L5ActiveColumns)
        self.L5.compute(self.L5ActiveColumns, learn=self.learning)
        L5activeColumnIndices = numpy.nonzero(self.L5ActiveColumns)[0]

        #no action generation is needed in this func

    def learn(self, env, observation, expectedReaction):
        # We humans learn by trial and error, and so does an AI agent. Neural networks have backprop for this,
        # but HTM has no clear way to do reinforcement learning (where would rewards feed in?). Here I try something new.
        self.learning = False  #...trial
        encodedInput = self.encode_input(observation[0], observation[2],
                                         observation[4], observation[5],
                                         str(self.action))

        self.toL4ConnectorI.compute(encodedInput, self.learning,
                                    self.L4ActiveColumns)
        L4Temp = numpy.zeros(
            self.columnCount * 3, dtype=int
        )  #ready to receive D1's disinhibition and D2's inhibition
        L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
        for column in L4activeColumnIndices:
            L4Temp[int(column) * 3] = 1
        D1ActiveColumnsIndices = numpy.nonzero(self.D1ActiveColumns)[0]
        for column in D1ActiveColumnsIndices:
            L4Temp[int(column) * 3 + 1] = 1
        D2ActiveColumnsIndices = numpy.nonzero(self.D2ActiveColumns)[0]
        for i in range(self.columnCount - 1):
            L4Temp[i * 3 + 2] = 1
        for column in D2ActiveColumnsIndices:  # achieve inhibition in this way
            L4Temp[int(column) * 3 + 2] = 0
        self.toL4ConnectorII.compute(L4Temp, self.learning,
                                     self.L4ActiveColumns)
        self.L4.compute(self.L4ActiveColumns, learn=self.learning)
        L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]

        L5Temp = numpy.zeros(self.columnCount, dtype=int)
        for column in L4activeColumnIndices:
            L5Temp[column] = 1
        self.toL5Connector.compute(L5Temp, self.learning, self.L5ActiveColumns)
        self.L5.compute(self.L5ActiveColumns, learn=self.learning)
        L5activeColumnIndices = numpy.nonzero(self.L5ActiveColumns)[0]

        #Action Generation
        p = 84  # there are 84 bits in the SDR representing the action fed to the agent; this is the "efference copy"
        count0 = 0
        count1 = 0
        count2 = 0
        for activeIndice in L5activeColumnIndices:
            convertedIndice = (activeIndice + 1) * 1126 / self.columnCount
            if convertedIndice <= 1126 - p / 4 and convertedIndice > 1126 - p / 2:
                count2 = count2 + 1
            if convertedIndice <= 1126 - p / 2 and convertedIndice > 1126 - 3 * p / 4:
                count1 = count1 + 1
            if convertedIndice <= 1126 - 3 * p / 4 and convertedIndice > 1126 - p:
                count0 = count0 + 1

        if count2 == max(count0, count1, count2):
            self.action = 2
        if count1 == max(count0, count1, count2):
            self.action = 1
        if count0 == max(count0, count1, count2):
            self.action = 0

        #...and error
        if self.action == expectedReaction:
            reward = 0.1
        else:
            reward = -0.1

        self.D1.setConnectedPermanence(self.D1.getConnectedPermanence() *
                                       (self.k**(-reward)))  # reward lowers D1's threshold ("Go")
        self.D2.setConnectedPermanence(self.D2.getConnectedPermanence() *
                                       (self.k**reward))  # and raises D2's ("NoGo"); punishment reverses both

        # Learn to correct mistakes (remember what's right and what's wrong)
        self.learning = True
        DTemp = numpy.zeros(self.columnCount, dtype=int)
        for column in L5activeColumnIndices:
            DTemp[column] = 1
        self.toD1Connector.compute(DTemp, self.learning, self.D1ActiveColumns)
        self.toD2Connector.compute(DTemp, self.learning, self.D2ActiveColumns)
        self.D1.compute(self.D1ActiveColumns, learn=self.learning)
        self.D2.compute(self.D2ActiveColumns, learn=self.learning)

        return reward
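
The reward signal above never touches individual synapses; it rescales each striatal layer's connected-permanence threshold multiplicatively. A minimal sketch of that update rule in isolation, assuming k > 1 and the fixed ±0.1 reward used above:

# Minimal sketch of the reward rule above, outside any HTM machinery.
# Assumes k > 1: a positive reward lowers D1's threshold (more "Go"
# activity) and raises D2's; a negative reward does the opposite.
k = 1.25
d1_threshold = d2_threshold = 0.5
for reward in (0.1, 0.1, -0.1):
    d1_threshold *= k ** (-reward)
    d2_threshold *= k ** reward
    print(d1_threshold, d2_threshold)
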

    def react(self, observation):
        self.learning = False
        encodedInput = self.encode_input(observation[0], observation[2],
                                         observation[4], observation[5],
                                         str(self.action))

        self.toL4ConnectorI.compute(encodedInput, self.learning,
                                    self.L4ActiveColumns)
        L4Temp = numpy.zeros(self.columnCount * 3, dtype=int)
        L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
        for column in L4activeColumnIndices:
            L4Temp[int(column) * 3] = 1
        D1ActiveColumnsIndices = numpy.nonzero(self.D1ActiveColumns)[0]
        for column in D1ActiveColumnsIndices:
            L4Temp[int(column) * 3 + 1] = 1
        D2ActiveColumnsIndices = numpy.nonzero(self.D2ActiveColumns)[0]
        for i in range(self.columnCount):
            L4Temp[i * 3 + 2] = 1
        for column in D2ActiveColumnsIndices:
            L4Temp[int(column) * 3 + 2] = 0
        self.toL4ConnectorII.compute(L4Temp, self.learning,
                                     self.L4ActiveColumns)
        self.L4.compute(self.L4ActiveColumns, learn=self.learning)
        L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]

        L5Temp = numpy.zeros(self.columnCount, dtype=int)
        for column in L4activeColumnIndices:
            L5Temp[column] = 1
        self.toL5Connector.compute(L5Temp, self.learning, self.L5ActiveColumns)
        self.L5.compute(self.L5ActiveColumns, learn=self.learning)
        L5activeColumnIndices = numpy.nonzero(self.L5ActiveColumns)[0]

        p = 84
        count0 = 0
        count1 = 0
        count2 = 0
        for activeIndice in L5activeColumnIndices:
            convertedIndice = (activeIndice + 1) * 1126 / self.columnCount
            if convertedIndice <= 1126 - p / 4 and convertedIndice > 1126 - p / 2:
                count2 = count2 + 1
            if convertedIndice <= 1126 - p / 2 and convertedIndice > 1126 - 3 * p / 4:
                count1 = count1 + 1
            if convertedIndice <= 1126 - 3 * p / 4 and convertedIndice > 1126 - p:
                count0 = count0 + 1

        if count2 == max(count0, count1, count2):
            self.action = 2
        if count1 == max(count0, count1, count2):
            self.action = 1
        if count0 == max(count0, count1, count2):
            self.action = 0

        return self.action
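
Taken together, learn() is the trial-and-error step and react() is pure inference. The listing never shows how the agent is driven, so the stub-based sketch below only illustrates the call protocol; StubAgent, the observation layout, and the fixed expected reaction are assumptions:

# Hypothetical driving loop; only the learn()/react() protocol is real here.
class StubAgent(object):
    def __init__(self):
        self.action = 0
    def learn(self, env, observation, expectedReaction):
        self.action = expectedReaction  # stand-in for the HTM pipeline above
        return 0.1 if self.action == expectedReaction else -0.1
    def react(self, observation):
        return self.action

agent = StubAgent()
observation = [0.0, None, 0.0, None, 0.0, 0.0]  # indices 0, 2, 4, 5 are read
for step in range(10):
    reward = agent.learn(None, observation, 1)  # trial and error
print(agent.react(observation))                 # pure inference -> 1
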
Example #42
0
    def testDeltaFilter(self):
        """
    data looks like:        should generate deltas
      "t"   "s"               "dt"     "ds"

      t     10                 X
      t+1s  20                 1s      10
      t+1d  50                 86399   30

    r t+1d+1s  60              X
      t+1d+3s  65              2s       5

    (a leading "r" marks a row that starts a new sequence, i.e. carries a reset)
    """
        r = RecordSensor()
        filename = findDataset("extra/qa/delta.csv")
        datasource = FileRecordStream(filename)
        r.dataSource = datasource
        n = 50
        encoder = MultiEncoder({
            'blah':
            dict(fieldname="s",
                 type='ScalarEncoder',
                 n=n,
                 w=11,
                 minval=0,
                 maxval=100)
        })

        r.encoder = encoder

        # Test #1 -- no deltas
        # Make sure we get a reset when a new sequence starts
        resetOut = numpy.zeros((1, ), dtype='float')
        sequenceIdOut = numpy.zeros((1, ), dtype='float')
        dataOut = numpy.zeros((n, ), dtype='float')
        sourceOut = numpy.zeros((1, ), dtype='float')
        categoryOut = numpy.zeros((1, ), dtype='float')

        outputs = dict(resetOut=resetOut,
                       sourceOut=sourceOut,
                       sequenceIdOut=sequenceIdOut,
                       dataOut=dataOut,
                       categoryOut=categoryOut)
        inputs = dict()
        r.verbosity = 0

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=24, hour=16, minute=8, second=0))
        self.assertEqual(lr['s'], 10)
        self.assertEqual(lr['_reset'], 1)
        self.assertTrue('dt' not in lr)
        self.assertTrue('ds' not in lr)

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=24, hour=16, minute=8, second=1))
        self.assertEqual(lr['s'], 20)
        self.assertEqual(lr['_reset'], 0)

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=25, hour=16, minute=8, second=0))
        self.assertEqual(lr['s'], 50)
        self.assertEqual(lr['_reset'], 0)

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=25, hour=16, minute=8, second=1))
        self.assertEqual(lr['s'], 60)
        self.assertEqual(lr['_reset'], 1)

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=25, hour=16, minute=8, second=3))
        self.assertEqual(lr['s'], 65)
        self.assertEqual(lr['_reset'], 0)

        # Add filters

        r.preEncodingFilters = [DeltaFilter("s", "ds"), DeltaFilter("t", "dt")]
        r.rewind()

        # skip first record, which has a reset

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=24, hour=16, minute=8, second=1))
        self.assertEqual(lr['s'], 20)
        # This record should have a reset since it is the first of a sequence.
        self.assertEqual(lr['_reset'], 1)
        self.assertEqual(lr['dt'], 1)
        self.assertEqual(lr['ds'], 10)

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=25, hour=16, minute=8, second=0))
        self.assertEqual(lr['s'], 50)
        self.assertEqual(lr['_reset'], 0)
        self.assertEqual(lr['dt'], 3600 * 24 - 1)
        self.assertEqual(lr['ds'], 30)

        # next reset record is skipped

        r.compute(inputs, outputs)
        lr = r.lastRecord
        self.assertEqual(
            lr['t'],
            datetime(year=2011, month=2, day=25, hour=16, minute=8, second=3))
        self.assertEqual(lr['s'], 65)
        self.assertEqual(lr['_reset'], 1)
        self.assertEqual(lr['dt'], 2)
        self.assertEqual(lr['ds'], 5)
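
The assertions above pin down DeltaFilter's behaviour: every emitted record carries the difference from the previous raw record, rows that begin a sequence emit nothing (their reset flag moves onto the next emitted record), and no delta ever spans a sequence boundary. A self-contained sketch of that arithmetic, independent of nupic:

# Re-derive the (dt, ds) pairs the test asserts, without nupic.
from datetime import datetime

rows = [  # (t, s, reset) mirroring delta.csv as described in the docstring
    (datetime(2011, 2, 24, 16, 8, 0), 10, 1),
    (datetime(2011, 2, 24, 16, 8, 1), 20, 0),
    (datetime(2011, 2, 25, 16, 8, 0), 50, 0),
    (datetime(2011, 2, 25, 16, 8, 1), 60, 1),
    (datetime(2011, 2, 25, 16, 8, 3), 65, 0),
]
prev = None
for t, s, reset in rows:
    if prev is not None and not reset:
        print((int((t - prev[0]).total_seconds()), s - prev[1]))
    prev = (t, s)
# prints (1, 10), (86399, 30), (2, 5), matching the assertions above
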
    def testSimpleMulticlassNetworkPY(self):
        # Setup data record stream of fake data (with three categories)
        filename = _getTempFileName()
        fields = [("timestamp", "datetime", "T"), ("value", "float", ""),
                  ("reset", "int", "R"), ("sid", "int", "S"),
                  ("categories", "list", "C")]
        records = (
            [datetime(day=1, month=3, year=2010), 0.0, 1, 0, "0"],
            [datetime(day=2, month=3, year=2010), 1.0, 0, 0, "1"],
            [datetime(day=3, month=3, year=2010), 0.0, 0, 0, "0"],
            [datetime(day=4, month=3, year=2010), 1.0, 0, 0, "1"],
            [datetime(day=5, month=3, year=2010), 0.0, 0, 0, "0"],
            [datetime(day=6, month=3, year=2010), 1.0, 0, 0, "1"],
            [datetime(day=7, month=3, year=2010), 0.0, 0, 0, "0"],
            [datetime(day=8, month=3, year=2010), 1.0, 0, 0, "1"],
        )
        dataSource = FileRecordStream(streamID=filename,
                                      write=True,
                                      fields=fields)
        for r in records:
            dataSource.appendRecord(list(r))

        # Create the network and get region instances.
        net = Network()
        net.addRegion("sensor", "py.RecordSensor", "{'numCategories': 3}")
        net.addRegion("classifier", "py.SDRClassifierRegion",
                      "{steps: '0', alpha: 0.001, implementation: 'py'}")
        net.link("sensor",
                 "classifier",
                 "UniformLink",
                 "",
                 srcOutput="dataOut",
                 destInput="bottomUpIn")
        net.link("sensor",
                 "classifier",
                 "UniformLink",
                 "",
                 srcOutput="categoryOut",
                 destInput="categoryIn")
        sensor = net.regions["sensor"]
        classifier = net.regions["classifier"]

        # Setup sensor region encoder and data stream.
        dataSource.close()
        dataSource = FileRecordStream(filename)
        sensorRegion = sensor.getSelf()
        sensorRegion.encoder = MultiEncoder()
        sensorRegion.encoder.addEncoder(
            "value", ScalarEncoder(21, 0.0, 13.0, n=256, name="value"))
        sensorRegion.dataSource = dataSource

        # Get ready to run.
        net.initialize()

        # Train the network (by default learning is ON in the classifier, but assert
        # anyway) and then turn off learning and turn on inference mode.
        self.assertEqual(classifier.getParameter("learningMode"), 1)
        net.run(8)

        # Test the network on the same data as it trained on; should classify with
        # 100% accuracy.
        classifier.setParameter("inferenceMode", 1)
        classifier.setParameter("learningMode", 0)

        # Assert learning is OFF and that the classifier learned the dataset.
        self.assertEqual(classifier.getParameter("learningMode"), 0,
                         "Learning mode is not turned off.")
        self.assertEqual(classifier.getParameter("inferenceMode"), 1,
                         "Inference mode is not turned on.")

        # make sure we can access all the parameters with getParameter
        self.assertEqual(classifier.getParameter("maxCategoryCount"), 2000)
        self.assertAlmostEqual(float(classifier.getParameter("alpha")), 0.001)
        self.assertEqual(int(classifier.getParameter("steps")), 0)
        self.assertTrue(classifier.getParameter("implementation") == "py")
        self.assertEqual(classifier.getParameter("verbosity"), 0)

        expectedCats = (
            [0.0],
            [1.0],
            [0.0],
            [1.0],
            [0.0],
            [1.0],
            [0.0],
            [1.0],
        )
        dataSource.rewind()
        for i in xrange(8):
            net.run(1)
            inferredCats = classifier.getOutputData("categoriesOut")
            self.assertSequenceEqual(
                expectedCats[i], inferredCats.tolist(),
                "Classifier did not infer expected category "
                "for record number {}.".format(i))
        # Close data stream, delete file.
        dataSource.close()
        os.remove(filename)
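
The write-then-reopen pattern above (open with write=True and a field schema, append, close, reopen read-only) is the standard FileRecordStream round trip. A compact sketch of just that part; the temp path is hypothetical:

# FileRecordStream round trip: write records against a schema of
# (name, type, special-flag) tuples, then reopen and read them back.
from datetime import datetime
from nupic.data.file_record_stream import FileRecordStream

fields = [("timestamp", "datetime", "T"), ("value", "float", "")]
stream = FileRecordStream(streamID="/tmp/example.csv", write=True, fields=fields)
stream.appendRecord([datetime(2010, 3, 1), 0.0])
stream.appendRecord([datetime(2010, 3, 2), 1.0])
stream.close()

stream = FileRecordStream("/tmp/example.csv")
print(stream.getNextRecord())  # [datetime(2010, 3, 1, 0, 0), 0.0]
stream.close()
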
Example #44
0
    def __init__(self, columnCount, InputEncoderParams, toL4ConnectorParamsI,
                 toL4ConnectorParamsII, toL5ConnectorParams,
                 toD1ConnectorParams, toD2ConnectorParams, L4Params, L5Params,
                 k, D1Params, D2Params):
        self.columnCount = columnCount
        self.toL4ConnectorParamsI = toL4ConnectorParamsI
        self.toL4ConnectorParamsII = toL4ConnectorParamsII
        self.toL5ConnectorParams = toL5ConnectorParams
        self.toD1ConnectorParams = toD1ConnectorParams
        self.toD2ConnectorParams = toD2ConnectorParams
        self.L4Params = L4Params
        self.L5Params = L5Params
        self.k = k
        self.D1Params = D1Params
        self.D2Params = D2Params
        self.learning = False

        #encoder
        from nupic.encoders import MultiEncoder
        self.InputEncoder = MultiEncoder()
        self.InputEncoder.addMultipleEncoders(InputEncoderParams)
        print "Encoder Online"

        #spatialPoolers
        from nupic.algorithms.spatial_pooler import SpatialPooler
        self.toL4ConnectorI = SpatialPooler(
            inputDimensions=(toL4ConnectorParamsI["inputDimensions"], ),
            columnDimensions=(columnCount, ),
            potentialPct=toL4ConnectorParamsI["potentialPct"],
            globalInhibition=toL4ConnectorParamsI["globalInhibition"],
            localAreaDensity=toL4ConnectorParamsI["localAreaDensity"],
            numActiveColumnsPerInhArea=toL4ConnectorParamsI[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toL4ConnectorParamsI["synPermInactiveDec"],
            synPermActiveInc=toL4ConnectorParamsI["synPermActiveInc"],
            synPermConnected=toL4ConnectorParamsI["synPermConnected"],
            boostStrength=toL4ConnectorParamsI["boostStrength"],
            seed=toL4ConnectorParamsI["seed"],
            wrapAround=toL4ConnectorParamsI["wrapAround"])  # verbose, but every parameter is explicit
        self.toL4ConnectorII = SpatialPooler(
            inputDimensions=(columnCount * 3, ),
            columnDimensions=(columnCount, ),
            potentialPct=toL4ConnectorParamsII["potentialPct"],
            globalInhibition=toL4ConnectorParamsII["globalInhibition"],
            localAreaDensity=toL4ConnectorParamsII["localAreaDensity"],
            numActiveColumnsPerInhArea=toL4ConnectorParamsII[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toL4ConnectorParamsII["synPermInactiveDec"],
            synPermActiveInc=toL4ConnectorParamsII["synPermActiveInc"],
            synPermConnected=toL4ConnectorParamsII["synPermConnected"],
            boostStrength=toL4ConnectorParamsII["boostStrength"],
            seed=toL4ConnectorParamsII["seed"],
            wrapAround=toL4ConnectorParamsII["wrapAround"])
        print "toL4Connector Online"
        self.toL5Connector = SpatialPooler(
            inputDimensions=(columnCount, ),
            columnDimensions=(columnCount, ),
            potentialPct=toL5ConnectorParams["potentialPct"],
            globalInhibition=toL5ConnectorParams["globalInhibition"],
            localAreaDensity=toL5ConnectorParams["localAreaDensity"],
            numActiveColumnsPerInhArea=toL5ConnectorParams[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toL5ConnectorParams["synPermInactiveDec"],
            synPermActiveInc=toL5ConnectorParams["synPermActiveInc"],
            synPermConnected=toL5ConnectorParams["synPermConnected"],
            boostStrength=toL5ConnectorParams["boostStrength"],
            seed=toL5ConnectorParams["seed"],
            wrapAround=toL5ConnectorParams["wrapAround"])
        print "toL5Connector Online"
        self.toD1Connector = SpatialPooler(
            inputDimensions=(columnCount, ),
            columnDimensions=(columnCount, ),
            potentialPct=toD1ConnectorParams["potentialPct"],
            globalInhibition=toD1ConnectorParams["globalInhibition"],
            localAreaDensity=toD1ConnectorParams["localAreaDensity"],
            numActiveColumnsPerInhArea=toD1ConnectorParams[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toD1ConnectorParams["synPermInactiveDec"],
            synPermActiveInc=toD1ConnectorParams["synPermActiveInc"],
            synPermConnected=toD1ConnectorParams["synPermConnected"],
            boostStrength=toD1ConnectorParams["boostStrength"],
            seed=toD1ConnectorParams["seed"],
            wrapAround=toD1ConnectorParams["wrapAround"])
        print "toD1Connector Online"
        self.toD2Connector = SpatialPooler(
            inputDimensions=(columnCount, ),
            columnDimensions=(columnCount, ),
            potentialPct=toD2ConnectorParams["potentialPct"],
            globalInhibition=toD2ConnectorParams["globalInhibition"],
            localAreaDensity=toD2ConnectorParams["localAreaDensity"],
            numActiveColumnsPerInhArea=toD2ConnectorParams[
                "numActiveColumnsPerInhArea"],
            synPermInactiveDec=toD2ConnectorParams["synPermInactiveDec"],
            synPermActiveInc=toD2ConnectorParams["synPermActiveInc"],
            synPermConnected=toD2ConnectorParams["synPermConnected"],
            boostStrength=toD2ConnectorParams["boostStrength"],
            seed=toD2ConnectorParams["seed"],
            wrapAround=toD2ConnectorParams["wrapAround"])
        print "toD2Connector Online"

        #HTM Layers
        from nupic.algorithms.temporal_memory import TemporalMemory
        self.L4ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.L4 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
        )
        print "L4 Online"
        self.L5ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.L5 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
        )
        print "L5 Online"
        self.D1ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.D1 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
            initialPermanence=0.21,
            connectedPermanence=0.5,
        )
        print "D1 Online"
        self.D2ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
        self.D2 = TemporalMemory(
            columnDimensions=(columnCount, ),
            seed=42,
            initialPermanence=0.21,
            connectedPermanence=0.5,
        )
        print "D2 Online"
Example #45
0
def createEncoder(encoderParams):
  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoderParams)
  return encoder
Example #46
0
def getDescription(datasets):

    # ========================================================================
    # Encoder for the sensor
    encoder = MultiEncoder()

    if config["encodingFieldStyleA"] == "contiguous":
        encoder.addEncoder(
            "fieldA",
            ScalarEncoder(
                w=config["encodingOnBitsA"],
                n=config["encodingFieldWidthA"],
                minval=0,
                maxval=config["numAValues"],
                periodic=True,
                name="fieldA",
            ),
        )
    elif config["encodingFieldStyleA"] == "sdr":
        encoder.addEncoder(
            "fieldA",
            SDRCategoryEncoder(
                w=config["encodingOnBitsA"],
                n=config["encodingFieldWidthA"],
                categoryList=range(config["numAValues"]),
                name="fieldA",
            ),
        )
    else:
        assert False

    if config["encodingFieldStyleB"] == "contiguous":
        encoder.addEncoder(
            "fieldB",
            ScalarEncoder(
                w=config["encodingOnBitsB"],
                n=config["encodingFieldWidthB"],
                minval=0,
                maxval=config["numBValues"],
                periodic=True,
                name="fieldB",
            ),
        )
    elif config["encodingFieldStyleB"] == "sdr":
        encoder.addEncoder(
            "fieldB",
            SDRCategoryEncoder(
                w=config["encodingOnBitsB"],
                n=config["encodingFieldWidthB"],
                categoryList=range(config["numBValues"]),
                name="fieldB",
            ),
        )
    else:
        assert False

    # ========================================================================
    # Network definition

    # ------------------------------------------------------------------
    # Node params
    # The inputs are long, horizontal vectors
    inputShape = (1, encoder.getWidth())

    # Layout the coincidences vertically stacked on top of each other, each
    # looking at the entire input field.
    coincidencesShape = (config["spCoincCount"], 1)
    inputBorder = inputShape[1] / 2
    if inputBorder * 2 >= inputShape[1]:
        inputBorder -= 1

    sensorParams = dict(
        # encoder/datasource are not parameters so don't include here
        verbosity=config["sensorVerbosity"]
    )

    CLAParams = dict(
        inputShape=inputShape,
        inputBorder=inputBorder,
        coincidencesShape=coincidencesShape,
        coincInputRadius=inputShape[1] / 2,
        coincInputPoolPct=1.0,
        gaussianDist=0,
        commonDistributions=0,  # should be False if possibly not training
        localAreaDensity=-1,  # 0.05,
        numActivePerInhArea=config["spNumActivePerInhArea"],
        dutyCyclePeriod=1000,
        stimulusThreshold=1,
        synPermInactiveDec=config["spSynPermInactiveDec"],
        synPermActiveInc=0.02,
        synPermActiveSharedDec=0.0,
        synPermOrphanDec=0.0,
        minPctDutyCycleBeforeInh=0.001,
        minPctDutyCycleAfterInh=config["spMinPctDutyCycleAfterInh"],
        minDistance=0.05,
        computeTopDown=1,
        spVerbosity=config["spVerbosity"],
        spSeed=1,
        printPeriodicStats=int(config["spPeriodicStats"]),
        # TP params
        disableTemporal=1,
        # General params
        trainingStep="spatial",
    )

    trainingDataSource = FileRecordStream(datasets["trainingFilename"])

    description = dict(
        options=dict(logOutputsDuringInference=False),
        network=dict(
            sensorDataSource=trainingDataSource,
            sensorEncoder=encoder,
            sensorParams=sensorParams,
            CLAType="py.CLARegion",
            CLAParams=CLAParams,
            classifierType=None,
            classifierParams=None,
        ),
    )

    if config["trainSP"]:
        description["spTrain"] = (
            dict(
                iterationCount=config["iterationCount"],
                # iter=displaySPCoincidences(50),
                finish=printSPCoincidences(),
            ),
        )
    else:
        description["spTrain"] = dict(
            # need to train with one iteration just to initialize data structures
            iterationCount=1
        )

    # ============================================================================
    # Inference tests
    inferSteps = []

    # ----------------------------------------
    # Training dataset
    if True:
        datasetName = "bothTraining"
        inferSteps.append(
            dict(
                name="%s_baseline" % datasetName,
                iterationCount=config["iterationCount"],
                setup=[sensorOpen(datasets["trainingFilename"])],
                ppOptions=dict(printLearnedCoincidences=True),
            )
        )

        inferSteps.append(
            dict(
                name="%s_acc" % datasetName,
                iterationCount=config["iterationCount"],
                setup=[sensorOpen(datasets["trainingFilename"])],
                ppOptions=dict(
                    onlyClassificationAcc=True,
                    tpActivationThresholds=config["tpActivationThresholds"],
                    computeDistances=True,
                    verbosity=1,
                ),
            )
        )

    # ----------------------------------------
    # Testing dataset
    if "testingFilename" in datasets:
        datasetName = "bothTesting"
        inferSteps.append(
            dict(
                name="%s_baseline" % datasetName,
                iterationCount=config["iterationCount"],
                setup=[sensorOpen(datasets["testingFilename"])],
                ppOptions=dict(printLearnedCoincidences=False),
            )
        )

        inferSteps.append(
            dict(
                name="%s_acc" % datasetName,
                iterationCount=config["iterationCount"],
                setup=[sensorOpen(datasets["testingFilename"])],
                ppOptions=dict(onlyClassificationAcc=True, tpActivationThresholds=config["tpActivationThresholds"]),
            )
        )

    description["infer"] = inferSteps

    return description
Example #47
0
def createEncoder(encoderParams):
  """Create a multi-encoder from params."""
  encoder = MultiEncoder()
  encoder.addMultipleEncoders(encoderParams)
  return encoder
Example #48
0
def getDescription(datasets):

  # ========================================================================
  # Network definition

  # Encoder for the sensor
  encoder = MultiEncoder()  
  if 'filenameCategory' in datasets:
    categories = [x.strip() for x in 
                              open(datasets['filenameCategory']).xreadlines()]
  else:
    categories = [chr(x+ord('a')) for x in range(26)]

  if config['overlappingPatterns']:
    encoder.addEncoder("name", SDRCategoryEncoder(n=200, 
      w=config['spNumActivePerInhArea'], categoryList=categories, name="name"))
  else:
    encoder.addEncoder("name", CategoryEncoder(w=config['spNumActivePerInhArea'], 
                        categoryList=categories, name="name"))


  # ------------------------------------------------------------------
  # Node params
  # The inputs are long, horizontal vectors
  inputDimensions = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field. 
  columnDimensions = (config['spCoincCount'], 1)

  # If we have disableSpatial, then set the number of "coincidences" to be the
  #  same as the encoder width
  if config['disableSpatial']:
    columnDimensions = (encoder.getWidth(), 1)
    config['trainSP'] = 0

  sensorParams = dict(
    # encoder/datasource are not parameters so don't include here
    verbosity=config['sensorVerbosity']
  )

  CLAParams = dict(
    # SP params
    disableSpatial = config['disableSpatial'],
    inputDimensions = inputDimensions,
    columnDimensions = columnDimensions,
    potentialRadius = inputDimensions[1]/2,
    potentialPct = 1.00,
    gaussianDist = 0,
    commonDistributions = 0,    # should be False if possibly not training
    localAreaDensity = -1, #0.05, 
    numActiveColumnsPerInhArea = config['spNumActivePerInhArea'], 
    dutyCyclePeriod = 1000,
    stimulusThreshold = 1,
    synPermInactiveDec=0.11,
    synPermActiveInc=0.11,
    synPermActiveSharedDec=0.0,
    synPermOrphanDec = 0.0,
    minPctDutyCycleBeforeInh = 0.001,
    minPctDutyCycleAfterInh = 0.001,
    spVerbosity = config['spVerbosity'],
    spSeed = 1,
    printPeriodicStats = int(config['spPrintPeriodicStats']),


    # TP params
    tpSeed = 1,
    disableTemporal = 0 if config['trainTP'] else 1,
    temporalImp = config['temporalImp'],
    nCellsPerCol = config['tpNCellsPerCol'] if config['trainTP'] else 1,

    collectStats = 1,
    burnIn = 2,
    verbosity = config['tpVerbosity'],

    newSynapseCount = config['spNumActivePerInhArea'],
    minThreshold = config['spNumActivePerInhArea'],
    activationThreshold = config['spNumActivePerInhArea'],

    initialPerm = config['tpInitialPerm'],
    connectedPerm = 0.5,
    permanenceInc = config['tpPermanenceInc'],
    permanenceDec = config['tpPermanenceDec'],  # perhaps tune this
    globalDecay = config['tpGlobalDecay'],

    pamLength = config['tpPAMLength'],
    maxSeqLength = config['tpMaxSeqLength'],
    maxAge = config['tpMaxAge'],


    # General params
    computeTopDown = config['computeTopDown'],
    trainingStep = 'spatial',
    )


  dataSource = FileRecordStream(datasets['filenameTrain'])

  description = dict(
    options = dict(
      logOutputsDuringInference = False,
    ),

    network = dict(
      sensorDataSource = dataSource,
      sensorEncoder = encoder, 
      sensorParams = sensorParams,

      CLAType = 'py.CLARegion',
      CLAParams = CLAParams,

      classifierType = None,
      classifierParams = None),
  )

  if config['trainSP']:
    description['spTrain'] = dict(
      iterationCount=config['iterationCountTrain'], 
      #iter=displaySPCoincidences(50),
      #finish=printSPCoincidences()
      ),
  else:
    description['spTrain'] = dict(
      # need to train with one iteration just to initialize data structures
      iterationCount=1)

  if config['trainTP']:
    description['tpTrain'] = []
    for i in xrange(config['trainTPRepeats']):
      stepDict = dict(name='step_%d' % (i), 
                      setup=sensorRewind, 
                      iterationCount=config['iterationCountTrain'],
                      )
      if config['tpTimingEvery'] > 0:
        stepDict['iter'] = printTPTiming(config['tpTimingEvery'])
        stepDict['finish'] = [printTPTiming(), printTPCells]

      description['tpTrain'].append(stepDict)


  # ----------------------------------------------------------------------------
  # Inference tests
  inferSteps = []

  if config['evalTrainingSetNumIterations'] > 0:
    # The training set. Used to train the n-grams. 
    inferSteps.append(
      dict(name = 'confidenceTrain_baseline', 
           iterationCount = min(config['evalTrainingSetNumIterations'], 
                                config['iterationCountTrain']),
           ppOptions = dict(verbosity=config['ppVerbosity'],
                            printLearnedCoincidences=True,
                            nGrams='train',
                            #ipsDetailsFor = "name,None,2",
                            ),
             #finish=printTPCells,
          )
      )

    # Testing the training set on both the TP and n-grams. 
    inferSteps.append(
      dict(name = 'confidenceTrain_nonoise', 
             iterationCount = min(config['evalTrainingSetNumIterations'], 
                                  config['iterationCountTrain']),
             setup = [sensorOpen(datasets['filenameTrain'])],
             ppOptions = dict(verbosity=config['ppVerbosity'],
                              printLearnedCoincidences=False,
                              nGrams='test',
                              burnIns = [1,2,3,4],
                              #ipsDetailsFor = "name,None,2",
                              #ipsAt = [1,2,3,4],
                              ),
            )
        )

    # The test set
  if True:
    if datasets['filenameTest'] != datasets['filenameTrain']:
      inferSteps.append(
        dict(name = 'confidenceTest_baseline', 
             iterationCount = config['iterationCountTest'],
             setup = [sensorOpen(datasets['filenameTest'])],
             ppOptions = dict(verbosity=config['ppVerbosity'],
                              printLearnedCoincidences=False,
                              nGrams='test',
                              burnIns = [1,2,3,4],
                              #ipsAt = [1,2,3,4],
                              ipsDetailsFor = "name,None,2",
                              ),
            )
        )


  description['infer'] = inferSteps

  return description
    def _createNetwork(self):

        def deepupdate(original, update):
            """
            Recursively update a dict.
            Subdict's won't be overwritten but also updated.
            """
            if update is None:
                return None
            for key, value in original.iteritems():
                if not key in update:
                    update[key] = value
                elif isinstance(value, dict):
                    deepupdate(value, update[key])
            return update


        self.network = Network()

        # check
        # if self.selectivity not in self.dest_region_params.keys():
        #     raise Exception, "There is no selected region : " + self.selectivity
        if not len(self.net_structure.keys()) == len(set(self.net_structure.keys())):
            raise Exception, "There are duplicated net_structure keys : " + str(self.net_structure.keys())

        # sensor
        for sensor_name, params in self.sensor_params.items():
            self.network.addRegion(sensor_name, "py.RecordSensor", json.dumps({"verbosity": 0}))
            sensor = self.network.regions[sensor_name].getSelf()

            # set encoder
            #params = deepupdate(cn.SENSOR_PARAMS, params)
            encoder = MultiEncoder()
            encoder.addMultipleEncoders( params )
            sensor.encoder         = encoder
            sensor.dataSource      = DataBuffer()


        # network
        print 'create element ...'
        for name in self.dest_region_params.keys():
            change_params = self.dest_region_params[name]
            params = deepupdate(self.default_params, change_params)
            # input width
            input_width = 0
            for source in [s for s,d in self.net_structure.items() if name in d]:
                if source in self.sensor_params.keys():
                    sensor = self.network.regions[source].getSelf()
                    input_width += sensor.encoder.getWidth()
                else:
                    input_width += params['TP_PARAMS']['cellsPerColumn'] * params['TP_PARAMS']['columnCount']

            params['SP_PARAMS']['inputWidth'] = input_width
            self._makeRegion(name, params)

        # link
        print 'link network ...'
        for source, dest_list in self.net_structure.items():
            for dest in dest_list:
                if source in self.sensor_params.keys():
                    self._linkRegion(source, dest)
                else:
                    if self.tp_enable:
                        self._linkRegion("tp_" + source, dest)
                    else:
                        self._linkRegion("sp_" + source, dest)

        # initialize
        print 'initializing network ...'
        self.network.initialize()
        for name in self.dest_region_params.keys():
            self._initRegion(name)

        return
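
The deepupdate helper above fills a sparse override dict with defaults: keys missing from the override are copied in, and nested dicts are merged recursively instead of being replaced. A self-contained demo of those semantics (using items() so it runs on Python 2 and 3):

# Self-contained demo of the deepupdate merge semantics used above.
def deepupdate(original, update):
    if update is None:
        return None
    for key, value in original.items():
        if key not in update:
            update[key] = value
        elif isinstance(value, dict):
            deepupdate(value, update[key])
    return update

defaults = {"SP_PARAMS": {"inputWidth": 0, "columnCount": 2048},
            "TP_PARAMS": {"cellsPerColumn": 32}}
override = {"SP_PARAMS": {"inputWidth": 512}}
print(deepupdate(defaults, override))
# {'SP_PARAMS': {'inputWidth': 512, 'columnCount': 2048},
#  'TP_PARAMS': {'cellsPerColumn': 32}}
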
    def _createNetwork(self):

        def deepupdate(original, update):
            """
            Recursively update a dict.
            Subdict's won't be overwritten but also updated.
            """
            if update is None:
                return None
            for key, value in original.iteritems():
                if not key in update:
                    update[key] = value
                elif isinstance(value, dict):
                    deepupdate(value, update[key])
            return update




        self.network = Network()

        # check
        if self.selectivity not in self.dest_resgion_data.keys():
            raise Exception, "There is no selected region : " + self.selectivity
        if not len(self.net_structure.keys()) == len(set(self.net_structure.keys())):
            raise Exception, "There are duplicated net_structure keys : " + str(self.net_structure.keys())

        # sensor
        for sensor_name, change_params in self.sensor_params.items():
            self.network.addRegion(sensor_name, "py.RecordSensor", json.dumps({"verbosity": 0}))
            sensor = self.network.regions[sensor_name].getSelf()

            # set encoder
            params = deepupdate(cn.SENSOR_PARAMS, change_params)
            encoder = MultiEncoder()
            encoder.addMultipleEncoders( params )
            sensor.encoder         = encoder

            # set datasource
            sensor.dataSource      = cn.DataBuffer()


        # network
        print 'create network ...'
        for source, dest_list in self.net_structure.items():
            for dest in dest_list:
                change_params = self.dest_resgion_data[dest]
                params = deepupdate(cn.PARAMS, change_params)

                if source in self.sensor_params.keys():
                    sensor = self.network.regions[source].getSelf()
                    params['SP_PARAMS']['inputWidth'] = sensor.encoder.getWidth()
                    self._addRegion(source, dest, params)
                else:
                    #self._addRegion("sp_" + source, dest, params)
                    self._addRegion("tp_" + source, dest, params)

        # initialize
        print 'initializing network ...'
        self.network.initialize()
        for name in set( itertools.chain.from_iterable( self.net_structure.values() )):
            self._initRegion(name)


        # TODO: in a 1-3-1 structure, blindly increasing the number of TP cells may be counterproductive.

        return
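
Both variants of _createNetwork are driven by the same declarative inputs: sensor_params names the sensor regions and their encoder fields, net_structure maps each source region to the regions it feeds, and the per-destination dicts override the defaults via deepupdate. A hypothetical three-region topology to make the wiring concrete; all names and the empty dicts are illustrative:

# Hypothetical inputs for _createNetwork above; names are illustrative.
# Each key in net_structure feeds every region in its list, so this
# describes  sensor1 -> region1 -> region2  plus  sensor2 -> region1.
sensor_params = {
    "sensor1": {},  # MultiEncoder field params, as elsewhere on this page
    "sensor2": {},
}
net_structure = {
    "sensor1": ["region1"],
    "sensor2": ["region1"],
    "region1": ["region2"],
}
dest_region_params = {
    "region1": {},  # per-region overrides, merged into the defaults
    "region2": {},
}
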
Example #51
0
def getDescription(datasets):

  # ========================================================================
  # Encoder for the sensor
  encoder = MultiEncoder()

  if config['encodingFieldStyleA'] == 'contiguous':
    encoder.addEncoder('fieldA', ScalarEncoder(w=config['encodingOnBitsA'],
                        n=config['encodingFieldWidthA'], minval=0,
                        maxval=config['numAValues'], periodic=True, name='fieldA'))
  elif config['encodingFieldStyleA'] == 'sdr':
    encoder.addEncoder('fieldA', SDRCategoryEncoder(w=config['encodingOnBitsA'],
                        n=config['encodingFieldWidthA'],
                        categoryList=range(config['numAValues']), name='fieldA'))
  else:
    assert False


  if config['encodingFieldStyleB'] == 'contiguous':
    encoder.addEncoder('fieldB', ScalarEncoder(w=config['encodingOnBitsB'], 
                      n=config['encodingFieldWidthB'], minval=0, 
                      maxval=config['numBValues'], periodic=True, name='fieldB'))
  elif config['encodingFieldStyleB'] == 'zero':
    encoder.addEncoder('fieldB', SDRRandomEncoder(w=0, n=config['encodingFieldWidthB'], 
                      name='fieldB'))
  elif config['encodingFieldStyleB'] == 'sdr':
    encoder.addEncoder('fieldB', SDRCategoryEncoder(w=config['encodingOnBitsB'], 
                      n=config['encodingFieldWidthB'], 
                      categoryList=range(config['numBValues']), name='fieldB'))
  else:
    assert False



  # ========================================================================
  # Network definition


  # ------------------------------------------------------------------
  # Node params
  # The inputs are long, horizontal vectors
  inputShape = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field. 
  coincidencesShape = (config['spCoincCount'], 1)
  inputBorder = inputShape[1]/2
  if inputBorder*2 >= inputShape[1]:
    inputBorder -= 1

  sensorParams = dict(
    # encoder/datasource are not parameters so don't include here
    verbosity=config['sensorVerbosity']
  )

  CLAParams = dict(
    inputShape = inputShape,
    inputBorder = inputBorder,
    coincidencesShape = coincidencesShape,
    coincInputRadius = inputShape[1]/2,
    coincInputPoolPct = 1.0,
    gaussianDist = 0,
    commonDistributions = 0,    # should be False if possibly not training
    localAreaDensity = -1, #0.05, 
    numActivePerInhArea = config['spNumActivePerInhArea'],
    dutyCyclePeriod = 1000,
    stimulusThreshold = 1,
    synPermInactiveDec = config['spSynPermInactiveDec'],
    synPermActiveInc = 0.02,
    synPermActiveSharedDec=0.0,
    synPermOrphanDec = 0.0,
    minPctDutyCycleBeforeInh = 0.001,
    minPctDutyCycleAfterInh = config['spMinPctDutyCycleAfterInh'],
    minDistance = 0.05,
    computeTopDown = 1,
    spVerbosity = config['spVerbosity'],
    spSeed = 1,
    printPeriodicStats = int(config['spPeriodicStats']),

    # TP params
    disableTemporal = 1,

    # General params
    trainingStep = 'spatial',
    )

  trainingDataSource = FileRecordStream(datasets['trainingFilename'])


  description = dict(
    options = dict(
      logOutputsDuringInference = False,
    ),

    network = dict(
      sensorDataSource = trainingDataSource,
      sensorEncoder = encoder, 
      sensorParams = sensorParams,

      CLAType = 'py.CLARegion',
      CLAParams = CLAParams,

      classifierType = None,
      classifierParams = None),

  )

  if config['trainSP']:
    description['spTrain'] = dict(
      iterationCount=config['iterationCount'], 
      #iter=displaySPCoincidences(50),
      finish=printSPCoincidences()
      ),
  else:
    description['spTrain'] = dict(
      # need to train with one iteration just to initialize data structures
      iterationCount=1)



  # ============================================================================
  # Inference tests
  inferSteps = []

  # ----------------------------------------
  # Training dataset
  if True:
    datasetName = 'bothTraining'
    inferSteps.append(
      dict(name = '%s_baseline' % datasetName, 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['trainingFilename'])],
           ppOptions = dict(printLearnedCoincidences=True),
          )
      )

    inferSteps.append(
      dict(name = '%s_acc' % datasetName, 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['trainingFilename'])],
           ppOptions = dict(onlyClassificationAcc=True,
                            tpActivationThresholds=config['tpActivationThresholds'],
                            computeDistances=True,
                            verbosity = 1),
          )
      )

  # ----------------------------------------
  # Testing dataset
  if 'testingFilename' in datasets:
    datasetName = 'bothTesting'
    inferSteps.append(
      dict(name = '%s_baseline' % datasetName, 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['testingFilename'])],
           ppOptions = dict(printLearnedCoincidences=False),
          )
      )

    inferSteps.append(
      dict(name = '%s_acc' % datasetName, 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['testingFilename'])],
           ppOptions = dict(onlyClassificationAcc=True,
                            tpActivationThresholds=config['tpActivationThresholds']),
          )
      )


  description['infer'] = inferSteps

  return description
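
One subtle detail recurring in these description files: the trailing comma after dict(...) in the trainSP branch makes description['spTrain'] a one-element tuple, while the else branch assigns a bare dict. If the consuming framework accepts a sequence of training steps, that is deliberate, but the comma is easy to misread. A two-line demonstration:

# The trailing comma is the whole difference:
a = dict(iterationCount=1),  # one-element tuple: ({'iterationCount': 1},)
b = dict(iterationCount=1)   # plain dict
print(type(a), type(b))      # tuple vs. dict
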
Example #52
0
def getDescription(datasets):

    # ========================================================================
    # Network definition

    # Encoder for the sensor
    encoder = MultiEncoder()
    if 'filenameCategory' in datasets:
        categories = [x.strip() for x in open(datasets['filenameCategory'])]
    else:
        categories = [chr(x + ord('a')) for x in range(26)]

    if config['overlappingPatterns']:
        encoder.addEncoder(
            "name",
            SDRCategoryEncoder(n=200,
                               w=config['spNumActivePerInhArea'],
                               categoryList=categories,
                               name="name"))
    else:
        encoder.addEncoder(
            "name",
            CategoryEncoder(w=config['spNumActivePerInhArea'],
                            categoryList=categories,
                            name="name"))

    # ------------------------------------------------------------------
    # Node params
    # The inputs are long, horizontal vectors
    inputDimensions = (1, encoder.getWidth())

    # Layout the coincidences vertically stacked on top of each other, each
    # looking at the entire input field.
    columnDimensions = (config['spCoincCount'], 1)

    # If we have disableSpatial, then set the number of "coincidences" to be the
    #  same as the encoder width
    if config['disableSpatial']:
        columnDimensions = (encoder.getWidth(), 1)
        config['trainSP'] = 0

    sensorParams = dict(
        # encoder/datasource are not parameters so don't include here
        verbosity=config['sensorVerbosity'])

    CLAParams = dict(
        # SP params
        disableSpatial=config['disableSpatial'],
        inputDimensions=inputDimensions,
        columnDimensions=columnDimensions,
        potentialRadius=inputDimensions[1] / 2,
        potentialPct=1.00,
        gaussianDist=0,
        commonDistributions=0,  # should be False if possibly not training
        localAreaDensity=-1,  #0.05, 
        numActiveColumnsPerInhArea=config['spNumActivePerInhArea'],
        dutyCyclePeriod=1000,
        stimulusThreshold=1,
        synPermInactiveDec=0.11,
        synPermActiveInc=0.11,
        synPermActiveSharedDec=0.0,
        synPermOrphanDec=0.0,
        minPctDutyCycleBeforeInh=0.001,
        minPctDutyCycleAfterInh=0.001,
        spVerbosity=config['spVerbosity'],
        spSeed=1,
        printPeriodicStats=int(config['spPrintPeriodicStats']),

        # TM params
        tpSeed=1,
        disableTemporal=0 if config['trainTP'] else 1,
        temporalImp=config['temporalImp'],
        nCellsPerCol=config['tpNCellsPerCol'] if config['trainTP'] else 1,
        collectStats=1,
        burnIn=2,
        verbosity=config['tpVerbosity'],
        newSynapseCount=config['spNumActivePerInhArea'],
        minThreshold=config['spNumActivePerInhArea'],
        activationThreshold=config['spNumActivePerInhArea'],
        initialPerm=config['tpInitialPerm'],
        connectedPerm=0.5,
        permanenceInc=config['tpPermanenceInc'],
        permanenceDec=config['tpPermanenceDec'],  # perhaps tune this
        globalDecay=config['tpGlobalDecay'],
        pamLength=config['tpPAMLength'],
        maxSeqLength=config['tpMaxSeqLength'],
        maxAge=config['tpMaxAge'],

        # General params
        computeTopDown=config['computeTopDown'],
        trainingStep='spatial',
    )

    dataSource = FileRecordStream(datasets['filenameTrain'])

    description = dict(
        options=dict(logOutputsDuringInference=False, ),
        network=dict(sensorDataSource=dataSource,
                     sensorEncoder=encoder,
                     sensorParams=sensorParams,
                     CLAType='py.CLARegion',
                     CLAParams=CLAParams,
                     classifierType=None,
                     classifierParams=None),
    )

    if config['trainSP']:
        description['spTrain'] = dict(
            iterationCount=config['iterationCountTrain'],
            #iter=displaySPCoincidences(50),
            #finish=printSPCoincidences()
        ),
    else:
        description['spTrain'] = dict(
            # need to train with one iteration just to initialize data structures
            iterationCount=1)

    if config['trainTP']:
        description['tpTrain'] = []
        for i in range(config['trainTPRepeats']):
            stepDict = dict(
                name='step_%d' % (i),
                setup=sensorRewind,
                iterationCount=config['iterationCountTrain'],
            )
            if config['tpTimingEvery'] > 0:
                stepDict['iter'] = printTPTiming(config['tpTimingEvery'])
                stepDict['finish'] = [printTPTiming(), printTPCells]

            description['tpTrain'].append(stepDict)

    # ----------------------------------------------------------------------------
    # Inference tests
    inferSteps = []

    if config['evalTrainingSetNumIterations'] > 0:
        # The training set. Used to train the n-grams.
        inferSteps.append(
            dict(
                name='confidenceTrain_baseline',
                iterationCount=min(config['evalTrainingSetNumIterations'],
                                   config['iterationCountTrain']),
                ppOptions=dict(
                    verbosity=config['ppVerbosity'],
                    printLearnedCoincidences=True,
                    nGrams='train',
                    #ipsDetailsFor = "name,None,2",
                ),
                #finish=printTPCells,
            ))

        # Testing the training set on both the TM and n-grams.
        inferSteps.append(
            dict(
                name='confidenceTrain_nonoise',
                iterationCount=min(config['evalTrainingSetNumIterations'],
                                   config['iterationCountTrain']),
                setup=[sensorOpen(datasets['filenameTrain'])],
                ppOptions=dict(
                    verbosity=config['ppVerbosity'],
                    printLearnedCoincidences=False,
                    nGrams='test',
                    burnIns=[1, 2, 3, 4],
                    #ipsDetailsFor = "name,None,2",
                    #ipsAt = [1,2,3,4],
                ),
            ))

        # The test set
    if True:
        if datasets['filenameTest'] != datasets['filenameTrain']:
            inferSteps.append(
                dict(
                    name='confidenceTest_baseline',
                    iterationCount=config['iterationCountTest'],
                    setup=[sensorOpen(datasets['filenameTest'])],
                    ppOptions=dict(
                        verbosity=config['ppVerbosity'],
                        printLearnedCoincidences=False,
                        nGrams='test',
                        burnIns=[1, 2, 3, 4],
                        #ipsAt = [1,2,3,4],
                        ipsDetailsFor="name,None,2",
                    ),
                ))

    description['infer'] = inferSteps

    return description
Example #53
0
def getDescription(datasets):

  # ========================================================================
  # Encoder for the sensor
  encoder = MultiEncoder()

  if config['encodingFieldStyleA'] == 'contiguous':
    encoder.addEncoder('fieldA', ScalarEncoder(w=config['encodingOnBitsA'],
                        n=config['encodingFieldWidthA'], minval=0,
                        maxval=config['numAValues'], periodic=True, name='fieldA'))
  elif config['encodingFieldStyleA'] == 'sdr':
    encoder.addEncoder('fieldA', SDRCategoryEncoder(w=config['encodingOnBitsA'],
                        n=config['encodingFieldWidthA'],
                        categoryList=range(config['numAValues']), name='fieldA'))
  else:
    assert False


  if config['encodingFieldStyleB'] == 'contiguous':
    encoder.addEncoder('fieldB', ScalarEncoder(w=config['encodingOnBitsB'], 
                      n=config['encodingFieldWidthB'], minval=0, 
                      maxval=config['numBValues'], periodic=True, name='fieldB'))
  elif config['encodingFieldStyleB'] == 'sdr':
    encoder.addEncoder('fieldB', SDRCategoryEncoder(w=config['encodingOnBitsB'], 
                      n=config['encodingFieldWidthB'], 
                      categoryList=range(config['numBValues']), name='fieldB'))
  else:
    assert False



  # ========================================================================
  # Network definition


  # ------------------------------------------------------------------
  # Node params
  # The inputs are long, horizontal vectors
  inputDimensions = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field. 
  columnDimensions = (config['spCoincCount'], 1)

  sensorParams = dict(
    # encoder/datasource are not parameters so don't include here
    verbosity=config['sensorVerbosity']
  )

  CLAParams = dict(
    inputDimensions = inputDimensions,
    columnDimensions = columnDimensions,
    potentialRadius = inputDimensions[1]/2,
    potentialPct = 1.0,
    gaussianDist = 0,
    commonDistributions = 0,    # should be False if possibly not training
    localAreaDensity = -1, #0.05, 
    numActiveColumnsPerInhArea = config['spNumActivePerInhArea'],
    dutyCyclePeriod = 1000,
    stimulusThreshold = 1,
    synPermInactiveDec = config['spSynPermInactiveDec'],
    synPermActiveInc = 0.02,
    synPermActiveSharedDec=0.0,
    synPermOrphanDec = 0.0,
    minPctDutyCycleBeforeInh = 0.001,
    minPctDutyCycleAfterInh = config['spMinPctDutyCycleAfterInh'],
    minDistance = 0.05,
    computeTopDown = 1,
    spVerbosity = config['spVerbosity'],
    spSeed = 1,
    printPeriodicStats = int(config['spPeriodicStats']),

    # TP params
    disableTemporal = 1,

    # General params
    trainingStep = 'spatial',
    )

  trainingDataSource = FileRecordStream(datasets['trainingFilename'])


  description = dict(
    options = dict(
      logOutputsDuringInference = False,
    ),

    network = dict(
      sensorDataSource = trainingDataSource,
      sensorEncoder = encoder, 
      sensorParams = sensorParams,

      CLAType = 'py.CLARegion',
      CLAParams = CLAParams,

      classifierType = None,
      classifierParams = None),

  )

  if config['trainSP']:
    description['spTrain'] = dict(
      iterationCount=config['iterationCount'], 
      #iter=displaySPCoincidences(50),
      finish=printSPCoincidences()
      ),
  else:
    description['spTrain'] = dict(
      # need to train with one iteration just to initialize data structures
      iterationCount=1)



  # ============================================================================
  # Inference tests
  inferSteps = []

  # ----------------------------------------
  # Training dataset
  if True:
    datasetName = 'bothTraining'
    inferSteps.append(
      dict(name = '{0!s}_baseline'.format(datasetName), 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['trainingFilename'])],
           ppOptions = dict(printLearnedCoincidences=True),
          )
      )

    inferSteps.append(
      dict(name = '{0!s}_acc'.format(datasetName), 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['trainingFilename'])],
           ppOptions = dict(onlyClassificationAcc=True,
                            tpActivationThresholds=config['tpActivationThresholds'],
                            computeDistances=True,
                            verbosity = 1),
          )
      )

  # ----------------------------------------
  # Testing dataset
  if 'testingFilename' in datasets:
    datasetName = 'bothTesting'
    inferSteps.append(
      dict(name = '{0!s}_baseline'.format(datasetName), 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['testingFilename'])],
           ppOptions = dict(printLearnedCoincidences=False),
          )
      )

    inferSteps.append(
      dict(name = '{0!s}_acc'.format(datasetName), 
           iterationCount = config['iterationCount'],
           setup = [sensorOpen(datasets['testingFilename'])],
           ppOptions = dict(onlyClassificationAcc=True,
                            tpActivationThresholds=config['tpActivationThresholds']),
          )
      )


  description['infer'] = inferSteps

  return description
Example #54
0
    def _get_encoder(self):
        # date encoding
        #date_enc = DateEncoder(name='date', forced=True)
        day_of_week_enc = ScalarEncoder(w=21, minval=0, maxval=7, radius=1.5,
                                        periodic=True, name=COL_DAY_OF_WEEK, forced=True)
        day_of_month_enc = ScalarEncoder(w=21, minval=1, maxval=31, radius=1.5,
                                         periodic=False, name=COL_DAY_OF_MONTH, forced=True)
        first_last_of_month_enc = ScalarEncoder(w=21, minval=0, maxval=2, radius=1, periodic=False,
                                                name=COL_FIRST_LAST_MONTH, forced=True)
        week_of_month_enc = ScalarEncoder(w=21, minval=0, maxval=6, radius=1.5,
                                          periodic=True, name=COL_WEEK_OF_MONTH, forced=True)
        month_of_year_enc = ScalarEncoder(w=21, minval=1, maxval=13, radius=1.5,
                                          periodic=True, name=COL_MONTH_OF_YEAR, forced=True)
        quarter_of_year_enc = ScalarEncoder(w=21, minval=0, maxval=4, radius=1.5,
                                            periodic=True, name=COL_QUART_OF_YEAR, forced=True)
        half_of_year_enc = ScalarEncoder(w=21, minval=0, maxval=2,
                                         radius=1, periodic=True, name=COL_HALF_OF_YEAR, forced=True)
        year_of_decade_enc = ScalarEncoder(w=21, minval=0, maxval=10, radius=1.5,
                                           periodic=True, name=COL_YEAR_OF_DECADE, forced=True)

        # semantics encoder
        stoch_rsi_enc = ScalarEncoder(w=21, minval=0, maxval=1,
                                      radius=0.05, periodic=False, name=COL_STOCH_RSI, forced=True)
        # symbol_enc = ScalarEncoder(w=21, minval=0, maxval=1, radius=0.1, periodic=False, name=COL_SYMBOL, forced=True)
        candlestick_enc = PassThroughEncoder(50, name=COL_CANDLESTICK, forced=True)

        encoder = MultiEncoder()
        encoder.addEncoder(day_of_week_enc.name, day_of_week_enc)
        encoder.addEncoder(day_of_month_enc.name, day_of_month_enc)
        encoder.addEncoder(first_last_of_month_enc.name, first_last_of_month_enc)
        encoder.addEncoder(week_of_month_enc.name, week_of_month_enc)
        encoder.addEncoder(year_of_decade_enc.name, year_of_decade_enc)
        encoder.addEncoder(month_of_year_enc.name, month_of_year_enc)
        encoder.addEncoder(quarter_of_year_enc.name, quarter_of_year_enc)
        encoder.addEncoder(half_of_year_enc.name, half_of_year_enc)

        encoder.addEncoder(stoch_rsi_enc.name, stoch_rsi_enc)
        # encoder.addEncoder(symbol_enc.name, symbol_enc)
        encoder.addEncoder(candlestick_enc.name, candlestick_enc)

        return encoder
Example #55
0
def createTemporalAnomaly(recordParams, spatialParams=_SP_PARAMS,
                          temporalParams=_TP_PARAMS,
                          verbosity=_VERBOSITY):
  """Generates a Network with connected RecordSensor, SP, TP.

  This function takes care of generating regions and the canonical links.
  The network has a sensor region reading data from a specified input and
  passing the encoded representation to an SPRegion.
  The SPRegion output is passed to a TPRegion.

  Note: this function returns a network that needs to be initialized. This
  allows the user to extend the network by adding further regions and
  connections.

  :param recordParams: a dict with parameters for creating RecordSensor region.
  :param spatialParams: a dict with parameters for creating SPRegion.
  :param temporalParams: a dict with parameters for creating TPRegion.
  :param verbosity: an integer representing how chatty the network will be.
  """
  inputFilePath = recordParams["inputFilePath"]
  scalarEncoderArgs = recordParams["scalarEncoderArgs"]
  dateEncoderArgs = recordParams["dateEncoderArgs"]

  scalarEncoder = ScalarEncoder(**scalarEncoderArgs)
  dateEncoder = DateEncoder(**dateEncoderArgs)

  encoder = MultiEncoder()
  encoder.addEncoder(scalarEncoderArgs["name"], scalarEncoder)
  encoder.addEncoder(dateEncoderArgs["name"], dateEncoder)

  network = Network()

  network.addRegion("sensor", "py.RecordSensor",
                    json.dumps({"verbosity": verbosity}))

  sensor = network.regions["sensor"].getSelf()
  sensor.encoder = encoder
  sensor.dataSource = FileRecordStream(streamID=inputFilePath)

  # Create the spatial pooler region
  spatialParams["inputWidth"] = sensor.encoder.getWidth()
  network.addRegion("spatialPoolerRegion", "py.SPRegion",
                    json.dumps(spatialParams))

  # Link the SP region to the sensor input
  network.link("sensor", "spatialPoolerRegion", "UniformLink", "")
  network.link("sensor", "spatialPoolerRegion", "UniformLink", "",
               srcOutput="resetOut", destInput="resetIn")
  network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
               srcOutput="spatialTopDownOut", destInput="spatialTopDownIn")
  network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
               srcOutput="temporalTopDownOut", destInput="temporalTopDownIn")

  # Add the TPRegion on top of the SPRegion
  network.addRegion("temporalPoolerRegion", "py.TPRegion",
                    json.dumps(temporalParams))

  network.link("spatialPoolerRegion", "temporalPoolerRegion", "UniformLink", "")
  network.link("temporalPoolerRegion", "spatialPoolerRegion", "UniformLink", "",
               srcOutput="topDownOut", destInput="topDownIn")


  spatialPoolerRegion = network.regions["spatialPoolerRegion"]

  # Make sure learning is enabled
  spatialPoolerRegion.setParameter("learningMode", True)
  # We want temporal anomalies so disable anomalyMode in the SP. This mode is
  # used for computing anomalies in a non-temporal model.
  spatialPoolerRegion.setParameter("anomalyMode", False)

  temporalPoolerRegion = network.regions["temporalPoolerRegion"]

  # Enable topDownMode to get the predicted columns output
  temporalPoolerRegion.setParameter("topDownMode", True)
  # Make sure learning is enabled (this is the default)
  temporalPoolerRegion.setParameter("learningMode", True)
  # Enable inference mode so we get predictions
  temporalPoolerRegion.setParameter("inferenceMode", True)
  # Enable anomalyMode to compute the anomaly score.
  temporalPoolerRegion.setParameter("anomalyMode", True)

  return network
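
A minimal usage sketch for createTemporalAnomaly (assumed, not from the
original source: the file name and encoder arguments below are illustrative,
and any CSV readable by FileRecordStream with a matching scalar field and
date field would do):

recordParams = {
    "inputFilePath": "gymdata.csv",  # hypothetical path
    "scalarEncoderArgs": {"name": "consumption", "w": 21, "minval": 0.0,
                          "maxval": 100.0, "n": 500, "clipInput": True},
    "dateEncoderArgs": {"name": "timestamp", "timeOfDay": (21, 9.5)},
}
network = createTemporalAnomaly(recordParams)
network.initialize()  # the returned network still needs initializing
network.run(100)      # stream 100 records through sensor -> SP -> TP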
Example #56
0
        self.monthOfYear = date.month
        self.quarterOfYear = (date.month - 1) // 3  # quarter of year, 0-3
        self.halfOfYear = (date.month - 1) // 6     # half of year, 0-1


if __name__ == "__main__":
    day_of_week_enc = ScalarEncoder(w=3, minval=0, maxval=7, radius=1.5, periodic=True, name="dayOfWeek", forced=True)
    day_of_month_enc = ScalarEncoder(w=3, minval=1, maxval=31, radius=1.5, periodic=False, name="dayOfMonth", forced=True)
    first_last_of_month_enc = ScalarEncoder(w=1, minval=0, maxval=2, radius=1, periodic=False, name="firstLastOfMonth", forced=True)
    week_of_month_enc = ScalarEncoder(w=3, minval=0, maxval=6, radius=1.5, periodic=True, name="weekOfMonth", forced=True)
    month_of_year_enc = ScalarEncoder(w=3, minval=1, maxval=13, radius=1.5, periodic=True, name="monthOfYear", forced=True)
    quarter_of_year_enc = ScalarEncoder(w=3, minval=0, maxval=4, radius=1.5, periodic=True, name="quarterOfYear", forced=True)
    half_of_year_enc = ScalarEncoder(w=1, minval=0, maxval=2, radius=1, periodic=True, name="halfOfYear", forced=True)
    year_of_decade_enc = ScalarEncoder(w=3, minval=0, maxval=10, radius=1.5, periodic=True, name="yearOfDecade", forced=True)

    date_enc = MultiEncoder()
    date_enc.addEncoder(day_of_week_enc.name, day_of_week_enc)
    date_enc.addEncoder(day_of_month_enc.name, day_of_month_enc)
    date_enc.addEncoder(first_last_of_month_enc.name, first_last_of_month_enc)
    date_enc.addEncoder(week_of_month_enc.name, week_of_month_enc)
    date_enc.addEncoder(year_of_decade_enc.name, year_of_decade_enc)
    date_enc.addEncoder(month_of_year_enc.name, month_of_year_enc)
    date_enc.addEncoder(quarter_of_year_enc.name, quarter_of_year_enc)
    date_enc.addEncoder(half_of_year_enc.name, half_of_year_enc)

    if os.path.isfile('tp.p'):
        print "loading TP from tp.p and tp.tp"
        with open("tp.p", "rb") as f:  # pickles should be read in binary mode
            tp = pickle.load(f)
        tp.loadFromFile("tp.tp")
    else:
Example #57
0
def createEncoder():
    volume_encoder = ScalarEncoder(21,
                                   0.0,
                                   20.0,
                                   n=200,
                                   name="volume",
                                   clipInput=False)
    floorheight_encoder = ScalarEncoder(21,
                                        0.0,
                                        24.0,
                                        n=125,
                                        name="floorheight",
                                        clipInput=False)

    # The ten diagonal-coordinate fields (diagCoorA..diagCoorJ) share
    # identical parameters, so build and register their encoders in a loop.
    global encoder
    encoder = MultiEncoder()

    encoder.addEncoder("volume", volume_encoder)
    encoder.addEncoder("floorheight", floorheight_encoder)
    for suffix in "ABCDEFGHIJ":
        name = "diagCoor" + suffix
        encoder.addEncoder(name,
                           ScalarEncoder(21, 0.0, 200.0, n=200, name=name,
                                         clipInput=False))

    return encoder
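
A short usage sketch (assumed, not part of the original source): the
returned MultiEncoder encodes a dict keyed by the registered field names
into one concatenated binary SDR.

createEncoder()
record = {"volume": 12.5, "floorheight": 3.0}
record.update({"diagCoor" + s: 100.0 for s in "ABCDEFGHIJ"})
sdr = encoder.encode(record)  # `encoder` is the module-level global set above
print "%d active bits of %d total" % (sdr.sum(), encoder.getWidth())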
Example #58
0
def getDescription(datasets):
  encoder = MultiEncoder()
  encoder.addEncoder("date", DateEncoder(timeOfDay=3))
  encoder.addEncoder("amount", LogEncoder(name="amount", maxval=1000))
  for i in xrange(0, nRandomFields):
    s = ScalarEncoder(name="scalar", minval=0, maxval=randomFieldWidth, resolution=1, w=3)
    encoder.addEncoder("random%d" % i, s)

  dataSource = FunctionSource(generateFunction,
                              dict(nRandomFields=nRandomFields,
                                   randomFieldWidth=randomFieldWidth))

  inputShape = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field.
  coincidencesShape = (nCoincidences, 1)
  # TODO: why do we need input border?
  inputBorder = inputShape[1]/2
  if inputBorder*2 >= inputShape[1]:
    inputBorder -= 1
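  # The decrement fires exactly when the input width is even, so after this
  # guard 2*inputBorder < inputShape[1] always holds.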


  nodeParams = dict()

  spParams = dict(
        commonDistributions=0,
        inputShape = inputShape,
        inputBorder = inputBorder,
        coincidencesShape = coincidencesShape,
        coincInputRadius = inputShape[1]/2,
        coincInputPoolPct = 0.75,
        gaussianDist = 0,
        localAreaDensity = 0.10,
        # localAreaDensity = 0.04,
        numActivePerInhArea = -1,
        dutyCyclePeriod = 1000,
        stimulusThreshold = 5,
        synPermInactiveDec=0.08,
        # synPermInactiveDec=0.02,
        synPermActiveInc=0.02,
        synPermActiveSharedDec=0.0,
        synPermOrphanDec = 0.0,
        minPctDutyCycleBeforeInh = 0.05,
        # minPctDutyCycleAfterInh = 0.1,
        # minPctDutyCycleBeforeInh = 0.05,
        minPctDutyCycleAfterInh = 0.05,
        # minPctDutyCycleAfterInh = 0.4,
        seed = 1,
  )

  otherParams = dict(
    disableTemporal=1,
    trainingStep='spatial',
  )

  nodeParams.update(spParams)
  nodeParams.update(otherParams)

  def mySetupCallback(experiment):
    print "Setup function called"

  description = dict(
    options = dict(
      logOutputsDuringInference = False,
    ),

    network = dict(
      sensorDataSource = dataSource,
      sensorEncoder = encoder,
      CLAType = "py.CLARegion",
      CLAParams = nodeParams,
      classifierType = None,
      classifierParams = None),

    # step
    spTrain = dict(
      name="phase1",
      setup=mySetupCallback,
      iterationCount=5000,
      #iter=displaySPCoincidences(100),
      finish=printSPCoincidences()),

    tpTrain = None,        # same format as spTrain if non-empty

    infer = None,          # same format as spTrain if non-empty

  )

  return description