Example #1
def _buildArgs(f, self=None, kwargs={}):
  """
  Get the default arguments from the function and assign as instance vars.

  Return a list of 3-tuples with (name, description, defaultValue) for each
    argument to the function.

  Assigns all arguments to the function as instance variables of CLARegion.
  If the argument was not provided, uses the default value.

  Pops any values from kwargs that go to the function.
  """
  # Get the name, description, and default value for each argument
  argTuples = getArgumentDescriptions(f)
  argTuples = argTuples[1:]  # Remove 'self'

  # Get the names of the parameters to our own constructor and remove them
  # Check for _original_init first, because if LockAttributesMixin is used,
  #  __init__'s signature will be just (self, *args, **kw), but
  #  _original_init is created with the original signature
  #init = getattr(self, '_original_init', self.__init__)
  init = CLARegion.__init__
  ourArgNames = [t[0] for t in getArgumentDescriptions(init)]
  # Also remove a few other names that aren't in our constructor but are
  #  computed automatically (e.g. numberOfCols for the TP)
  ourArgNames += [
    'numberOfCols',    # TP
    'cellsPerColumn',  # TP
    'nCells',          # FDRSTemporal
    'cloneMap',        # FDRSTemporal / FDRCSpatial
    'numCloneMasters', # FDRSTemporal / FDRCSpatial
    'whichCellsClass', # FDRSTemporal
  ]
  for argTuple in argTuples[:]:
    if argTuple[0] in ourArgNames:
      argTuples.remove(argTuple)

  # Build the dictionary of arguments
  if self:
    for argTuple in argTuples:
      argName = argTuple[0]
      if argName in kwargs:
        # Argument was provided
        argValue = kwargs.pop(argName)
      else:
        # Argument was not provided; use the default value if there is one, and
        #  raise an exception otherwise
        if len(argTuple) == 2:
          # No default value
          raise TypeError("Must provide '%s'" % argName)
        argValue = argTuple[2]
      # Set as an instance variable if 'self' was passed in
      setattr(self, argName, argValue)

  return argTuples
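
The helper getArgumentDescriptions is not shown in any of these examples. Judging from how _buildArgs consumes its result (3-tuples of (name, description, defaultValue), or 2-tuples when an argument has no default), it can be approximated with Python 3's inspect module. The sketch below is only an assumption for illustration; the real helper also parses per-argument descriptions out of the docstring, which this version leaves empty.

import inspect

def getArgumentDescriptionsSketch(f):
  """Approximate stand-in for getArgumentDescriptions (assumption, not the
  real implementation). Returns (name, description, defaultValue) for each
  argument of f that has a default, and (name, description) otherwise."""
  spec = inspect.getfullargspec(f)
  defaults = spec.defaults or ()
  numWithoutDefault = len(spec.args) - len(defaults)
  result = []
  for i, name in enumerate(spec.args):
    if i < numWithoutDefault:
      result.append((name, ""))
    else:
      result.append((name, "", defaults[i - numWithoutDefault]))
  return result

class _FakeRegion(object):
  """Hypothetical region class used only to exercise the sketch."""
  def __init__(self, columnCount=2048, cellsPerColumn=32, learn=True):
    pass

print(getArgumentDescriptionsSketch(_FakeRegion.__init__))
# [('self', ''), ('columnCount', '', 2048), ('cellsPerColumn', '', 32),
#  ('learn', '', True)]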
Example #2
def _buildArgs(f, self=None, kwargs={}):
  """
  Get the default arguments from the function and assign as instance vars.

  Return a list of 3-tuples with (name, description, defaultValue) for each
    argument to the function.

  Assigns all arguments to the function as instance variables of SPRegion.
  If the argument was not provided, uses the default value.

  Pops any values from kwargs that go to the function.

  """
  # Get the name, description, and default value for each argument
  argTuples = getArgumentDescriptions(f)
  argTuples = argTuples[1:]  # Remove 'self'

  # Get the names of the parameters to our own constructor and remove them
  # Check for _original_init first, because if LockAttributesMixin is used,
  #  __init__'s signature will be just (self, *args, **kw), but
  #  _original_init is created with the original signature
  #init = getattr(self, '_original_init', self.__init__)
  init = SPRegion.__init__
  ourArgNames = [t[0] for t in getArgumentDescriptions(init)]
  # Also remove a few other names that aren't in our constructor but are
  #  computed automatically (e.g. numberOfCols for the TP)
  # TODO: where does numberOfCols come into SPRegion?
  ourArgNames += [
    'numberOfCols',
  ]
  for argTuple in argTuples[:]:
    if argTuple[0] in ourArgNames:
      argTuples.remove(argTuple)

  # Build the dictionary of arguments
  if self:
    for argTuple in argTuples:
      argName = argTuple[0]
      if argName in kwargs:
        # Argument was provided
        argValue = kwargs.pop(argName)
      else:
        # Argument was not provided; use the default value if there is one, and
        #  raise an exception otherwise
        if len(argTuple) == 2:
          # No default value
          raise TypeError("Must provide value for '%s'" % argName)
        argValue = argTuple[2]
      # Set as an instance variable if 'self' was passed in
      setattr(self, argName, argValue)

  return argTuples
Example #3
def _buildArgs(tmClass, self=None, kwargs={}):
  """
  Get the default arguments from the function and assign as instance vars.

  Return a list of 3-tuples with (name, description, defaultValue) for each
    argument to the function.

  Assigns all arguments to the function as instance variables of TMRegion.
  If the argument was not provided, uses the default value.

  Pops any values from kwargs that go to the function.

  """
  # Get the name, description, and default value for each argument
  argTuples = getArgumentDescriptions(tmClass.__init__)
  argTuples = argTuples[1:]  # Remove "self"

  # Get the names of the parameters to our own constructor and remove them
  init = TMRegion.__init__
  ourArgNames = [t[0] for t in getArgumentDescriptions(init)]
  # Also remove a few other names that aren't in our constructor but are
  #  computed automatically
  #ourArgNames += [
  #  "inputDimensions", # TODO: CHECK IF WE NEED TO DO THIS
  #]
  for argTuple in argTuples[:]:
    if argTuple[0] in ourArgNames:
      argTuples.remove(argTuple)

  # Build the dictionary of arguments
  if self:
    for argTuple in argTuples:
      argName = argTuple[0]
      if argName in kwargs:
        # Argument was provided
        argValue = kwargs.pop(argName)
      else:
        # Argument was not provided; use the default value if there is one, and
        #  raise an exception otherwise
        if len(argTuple) == 2:
          # No default value
          raise TypeError("Must provide value for '%s'" % argName)
        argValue = argTuple[2]
      # Set as an instance variable if "self" was passed in
      setattr(self, argName, argValue)

  return argTuples
Example #4
def _buildArgs(tmClass, self=None, kwargs={}):
    """
  Get the default arguments from the function and assign as instance vars.

  Return a list of 3-tuples with (name, description, defaultValue) for each
    argument to the function.

  Assigns all arguments to the function as instance variables of TMRegion.
  If the argument was not provided, uses the default value.

  Pops any values from kwargs that go to the function.

  """
    # Get the name, description, and default value for each argument
    argTuples = getArgumentDescriptions(tmClass.__init__)
    argTuples = argTuples[1:]  # Remove "self"

    # Get the names of the parameters to our own constructor and remove them
    init = TMRegion.__init__
    ourArgNames = [t[0] for t in getArgumentDescriptions(init)]
    # Also remove a few other names that aren't in our constructor but are
    #  computed automatically
    #ourArgNames += [
    #  "inputDimensions", # TODO: CHECK IF WE NEED TO DO THIS
    #]
    for argTuple in argTuples[:]:
        if argTuple[0] in ourArgNames:
            argTuples.remove(argTuple)

    # Build the dictionary of arguments
    if self:
        for argTuple in argTuples:
            argName = argTuple[0]
            if argName in kwargs:
                # Argument was provided
                argValue = kwargs.pop(argName)
            else:
                # Argument was not provided; use the default value if there is one, and
                #  raise an exception otherwise
                if len(argTuple) == 2:
                    # No default value
                    raise TypeError("Must provide value for '%s'" % argName)
                argValue = argTuple[2]
            # Set as an instance variable if "self" was passed in
            setattr(self, argName, argValue)

    return argTuples
Example #5
  def compute(self, inputs, outputs):
    """
    Run one iteration of TM's compute.

    Note that if the reset signal is True (1) we assume this iteration
    represents the *end* of a sequence. The output will contain the TM
    representation to this point and any history will then be reset. The output
    at the next compute will start fresh, presumably with bursting columns.
    """

    activeColumns = set(numpy.where(inputs["bottomUpIn"] == 1)[0])

    if "externalInput" in inputs:
      activeExternalCells = set(numpy.where(inputs["externalInput"] == 1)[0])
    else:
      activeExternalCells = None

    if "topDownIn" in inputs:
      activeApicalCells = set(numpy.where(inputs["topDownIn"] == 1)[0])
    else:
      activeApicalCells = None

    # Figure out if our class is one of the "general types"
    args = getArgumentDescriptions(self._tm.compute)
    if len(args) > 3:
      # General temporal memory
      self._tm.compute(activeColumns,
                       activeExternalCells=activeExternalCells,
                       activeApicalCells=activeApicalCells,
                       formInternalConnections=self.formInternalConnections,
                       learn=self.learningMode)
    else:
      # Plain old temporal memory
      self._tm.compute(activeColumns, learn=self.learningMode)

    # Normal temporal memory doesn't compute predictedActiveCells so we
    # always compute it explicitly
    self.activeState[:] = 0
    self.activeState[self._tm.getActiveCells()] = 1
    predictedActiveCells = self.activeState * self.previouslyPredictedCells

    self.previouslyPredictedCells[:] = 0
    self.previouslyPredictedCells[self._tm.getPredictiveCells()] = 1

    # Copy numpy values into the various outputs
    # outputs['bottomUpOut'][:] = self.activeState
    outputs['bottomUpOut'][:] = predictedActiveCells
    outputs['predictiveCells'][:] = self.previouslyPredictedCells
    outputs['predictedActiveCells'][:] = predictedActiveCells

    # Handle reset after current input has been processed
    if 'resetIn' in inputs:
      assert len(inputs['resetIn']) == 1
      if inputs['resetIn'][0] != 0:
        self.reset()
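
The predicted-active computation above is just an element-wise product of two binary cell arrays: cells that are active now and were predicted on the previous step. A tiny worked example with hypothetical values:

import numpy

# 8 cells, binary state vectors (hypothetical values for illustration).
activeState              = numpy.array([0, 1, 1, 0, 0, 1, 0, 0])
previouslyPredictedCells = numpy.array([0, 1, 0, 0, 0, 1, 1, 0])

# Element-wise product == intersection: active AND previously predicted.
predictedActiveCells = activeState * previouslyPredictedCells
print(predictedActiveCells)  # [0 1 0 0 0 1 0 0]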
Example #6
    def compute(self, inputs, outputs):
        """
    Run one iteration of TM's compute.

    Note that if the reset signal is True (1) we assume this iteration
    represents the *end* of a sequence. The output will contain the TM
    representation to this point and any history will then be reset. The output
    at the next compute will start fresh, presumably with bursting columns.
    """

        activeColumns = inputs["bottomUpIn"].nonzero()[0]

        if "externalInput" in inputs:
            activeExternalCells = inputs["externalInput"].nonzero()[0]
        else:
            activeExternalCells = ()

        if "topDownIn" in inputs:
            activeApicalCells = inputs["topDownIn"].nonzero()[0]
        else:
            activeApicalCells = ()

        # Figure out if our class is one of the "extended types"
        args = getArgumentDescriptions(self._tm.compute)
        if len(args) > 3:
            # Extended temporal memory
            self._tm.compute(
                activeColumns,
                activeCellsExternalBasal=activeExternalCells,
                activeCellsExternalApical=activeApicalCells,
                reinforceCandidatesExternalBasal=self.prevActiveExternalCells,
                reinforceCandidatesExternalApical=self.prevActiveApicalCells,
                growthCandidatesExternalBasal=self.prevActiveExternalCells,
                growthCandidatesExternalApical=self.prevActiveApicalCells,
                learn=self.learningMode)
            self.prevActiveExternalCells = activeExternalCells
            self.prevActiveApicalCells = activeApicalCells
        else:
            # Plain old temporal memory
            self._tm.compute(activeColumns, learn=self.learningMode)

        # Extract the active / predictive cells and put them into binary arrays.
        outputs["activeCells"][:] = 0
        outputs["activeCells"][self._tm.getActiveCells()] = 1
        outputs["predictedCells"][:] = 0
        outputs["predictedCells"][self.prevPredictiveCells] = 1
        outputs["predictedActiveCells"][:] = (outputs["activeCells"] *
                                              outputs["predictedActiveCells"])

        predictiveCells = self._tm.getPredictiveCells()
        outputs["predictiveCells"][:] = 0
        outputs["predictiveCells"][predictiveCells] = 0
        self.prevPredictiveCells = predictiveCells

        # Select appropriate output for bottomUpOut
        if self.defaultOutputType == "active":
            outputs["bottomUpOut"][:] = outputs["activeCells"]
        elif self.defaultOutputType == "predictive":
            outputs["bottomUpOut"][:] = outputs["predictiveCells"]
        elif self.defaultOutputType == "predictedActiveCells":
            outputs["bottomUpOut"][:] = outputs["predictedActiveCells"]
        else:
            raise Exception("Unknown outputType: " + self.defaultOutputType)

        # Handle reset after current input has been processed
        if "resetIn" in inputs:
            assert len(inputs["resetIn"]) == 1
            if inputs["resetIn"][0] != 0:
                self.reset()
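
All of the compute() variants in these examples follow the same calling convention: inputs and outputs are dictionaries of pre-allocated numpy arrays keyed by link name, with column-sized binary vectors arriving on bottomUpIn and cell-sized binary vectors written to the outputs. Below is a minimal sketch of how such buffers might be set up for a single step; the sizes and the region variable are hypothetical, and in a real application the Network runtime allocates these arrays and invokes compute() itself.

import numpy

# Hypothetical dimensions, for illustration only.
numColumns = 2048
cellsPerColumn = 32
numCells = numColumns * cellsPerColumn

inputs = {
    "bottomUpIn": numpy.zeros(numColumns, dtype="float32"),
    "resetIn": numpy.zeros(1, dtype="float32"),
}
outputs = {
    "bottomUpOut": numpy.zeros(numCells, dtype="float32"),
    "activeCells": numpy.zeros(numCells, dtype="float32"),
    "predictedCells": numpy.zeros(numCells, dtype="float32"),
    "predictedActiveCells": numpy.zeros(numCells, dtype="float32"),
    "predictiveCells": numpy.zeros(numCells, dtype="float32"),
}

# Activate a handful of columns and run one step.
inputs["bottomUpIn"][[3, 17, 42]] = 1
# region.compute(inputs, outputs)  # 'region' would be a constructed TMRegion

# Signal the end of the sequence on the following step.
inputs["resetIn"][0] = 1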
Example #7
  def compute(self, inputs, outputs):
    """
    Run one iteration of TM's compute.

    The guts of the compute are contained in the self._tmClass compute() call
    """

    # Handle reset input
    if 'resetIn' in inputs:
      assert len(inputs['resetIn']) == 1
      if inputs['resetIn'][0] != 0:
        self.reset()

    activeColumns = set(numpy.where(inputs["bottomUpIn"] == 1)[0])

    if "externalInput" in inputs:
      activeExternalCells = set(numpy.where(inputs["externalInput"] == 1)[0])
    else:
      activeExternalCells = None

    if "topDownIn" in inputs:
      activeApicalCells = set(numpy.where(inputs["topDownIn"] == 1)[0])
    else:
      activeApicalCells = None

    # Figure out if our class is one of the "general types"
    args = getArgumentDescriptions(self._tm.compute)
    if len(args) > 3:
      # General temporal memory
      self._tm.compute(activeColumns,
                       activeExternalCells=activeExternalCells,
                       activeApicalCells=activeApicalCells,
                       formInternalConnections=self.formInternalConnections,
                       learn=self.learningMode)
      predictedActiveCells = self._tm.predictedActiveCells
    else:
      # Plain old temporal memory
      self._tm.compute(activeColumns, learn=self.learningMode)
      # Normal temporal memory doesn't compute predictedActiveCells
      predictedActiveCells = self._tm.activeCells & self.previouslyPredictedCells
      self.previouslyPredictedCells = self._tm.predictiveCells


    # Set the various outputs

    # HACK HACK: temporary until accessors are in place.
    activeCells = list(self._tm.activeCells)
    if isinstance(activeCells[0], ConnectionsCell):
      activeCells = self._tm.getCellIndices(self._tm.activeCells)
      predictiveCells = self._tm.getCellIndices(self._tm.predictiveCells)
      predictedActiveCells = self._tm.getCellIndices(predictedActiveCells)
    else:
      predictiveCells = list(self._tm.predictiveCells)
      predictedActiveCells = list(predictedActiveCells)

    outputs['bottomUpOut'][:] = 0
    outputs['bottomUpOut'][activeCells] = 1

    outputs['predictiveCells'][:] = 0
    outputs['predictiveCells'][predictiveCells] = 1

    outputs['predictedActiveCells'][:] = 0
    outputs['predictedActiveCells'][predictedActiveCells] = 1
Example #8
    def compute(self, inputs, outputs):
        """
    Run one iteration of TM's compute.

    The guts of the compute are contained in the self._tmClass compute() call
    """

        # Handle reset input
        if 'resetIn' in inputs:
            assert len(inputs['resetIn']) == 1
            if inputs['resetIn'][0] != 0:
                self.reset()

        activeColumns = set(numpy.where(inputs["bottomUpIn"] == 1)[0])

        if "externalInput" in inputs:
            activeExternalCells = set(
                numpy.where(inputs["externalInput"] == 1)[0])
        else:
            activeExternalCells = None

        if "topDownIn" in inputs:
            activeApicalCells = set(numpy.where(inputs["topDownIn"] == 1)[0])
        else:
            activeApicalCells = None

        # Figure out if our class is one of the "general types"
        args = getArgumentDescriptions(self._tm.compute)
        if len(args) > 3:
            # General temporal memory
            self._tm.compute(
                activeColumns,
                activeExternalCells=activeExternalCells,
                activeApicalCells=activeApicalCells,
                formInternalConnections=self.formInternalConnections,
                learn=self.learningMode)
            predictedActiveCells = self._tm.predictedActiveCells
        else:
            # Plain old temporal memory
            self._tm.compute(activeColumns, learn=self.learningMode)
            # Normal temporal memory doesn't compute predictedActiveCells
            predictedActiveCells = self._tm.activeCells & self.previouslyPredictedCells
            self.previouslyPredictedCells = self._tm.predictiveCells

        # Set the various outputs

        # HACK HACK: temporary until accessors are in place.
        activeCells = list(self._tm.activeCells)
        if isinstance(activeCells[0], ConnectionsCell):
            activeCells = self._tm.getCellIndices(self._tm.activeCells)
            predictiveCells = self._tm.getCellIndices(self._tm.predictiveCells)
            predictedActiveCells = self._tm.getCellIndices(
                predictedActiveCells)
        else:
            predictiveCells = list(self._tm.predictiveCells)
            predictedActiveCells = list(predictedActiveCells)

        outputs['bottomUpOut'][:] = 0
        outputs['bottomUpOut'][activeCells] = 1

        outputs['predictiveCells'][:] = 0
        outputs['predictiveCells'][predictiveCells] = 1

        outputs['predictedActiveCells'][:] = 0
        outputs['predictedActiveCells'][predictedActiveCells] = 1
Example #9
def _buildArgs(f, self=None, kwargs={}):
  """
  Get the default arguments from the function and assign as instance vars.

  Return a list of 3-tuples with (name, description, defaultValue) for each
    argument to the function.

  Assigns all arguments to the function as instance variables of SPRegion.
  If the argument was not provided, uses the default value.

  Pops any values from kwargs that go to the function.

  """
  # Get the name, description, and default value for each argument
  argTuples = getArgumentDescriptions(f)
  argTuples = argTuples[1:]  # Remove 'self'

  # Get the names of the parameters to our own constructor and remove them
  # Check for _original_init first, because if LockAttributesMixin is used,
  #  __init__'s signature will be just (self, *args, **kw), but
  #  _original_init is created with the original signature
  #init = getattr(self, '_original_init', self.__init__)
  init = SPRegion.__init__
  ourArgNames = [t[0] for t in getArgumentDescriptions(init)]
  # Also remove a few other names that aren't in our constructor but are
  #  computed automatically (e.g. numberOfCols for the TP)
  # TODO: where does numberOfCols come into SPRegion?
  ourArgNames += [
    'numberOfCols',
  ]
  for argTuple in argTuples[:]:
    if argTuple[0] in ourArgNames:
      argTuples.remove(argTuple)

  # Build the dictionary of arguments
  if self:
    for argTuple in argTuples:
      argName = argTuple[0]
      if argName in kwargs:
        # Argument was provided
        argValue = kwargs.pop(argName)
      else:
        # Argument was not provided; use the default value if there is one, and
        #  raise an exception otherwise
        if len(argTuple) == 2:
          # No default value
          raise TypeError("Must provide value for '%s'" % argName)
        argValue = argTuple[2]
      # Set as an instance variable if 'self' was passed in
      setattr(self, argName, argValue)
      
  # Translate some parameters for backward compatibility
  if 'numActivePerInhArea' in kwargs:
    setattr(self, 'numActiveColumnsPerInhArea', kwargs['numActivePerInhArea'])
    kwargs.pop('numActivePerInhArea')
    
  if 'coincInputPoolPct' in kwargs:
    setattr(self, 'potentialPct', kwargs['coincInputPoolPct'])
    kwargs.pop('coincInputPoolPct')

  return argTuples
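
The backward-compatibility block at the end of this variant renames two legacy parameters (numActivePerInhArea and coincInputPoolPct) to their current instance-variable names and pops them from kwargs. If more renames accumulate, a small mapping table keeps the logic in one place; the sketch below is a possible refactoring under that assumption, not the shipped code.

# Possible refactoring (assumption): one mapping table instead of one
# if-branch per renamed parameter.
_DEPRECATED_PARAM_MAP = {
  'numActivePerInhArea': 'numActiveColumnsPerInhArea',
  'coincInputPoolPct': 'potentialPct',
}

def _translateDeprecatedParams(self, kwargs):
  for oldName, newName in _DEPRECATED_PARAM_MAP.items():
    if oldName in kwargs:
      setattr(self, newName, kwargs.pop(oldName))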
Example #10
    def compute(self, inputs, outputs):
        """
    Run one iteration of TM's compute.

    Note that if the reset signal is True (1) we assume this iteration
    represents the *end* of a sequence. The output will contain the TM
    representation to this point and any history will then be reset. The output
    at the next compute will start fresh, presumably with bursting columns.
    """

        activeColumns = set(numpy.where(inputs["bottomUpIn"] == 1)[0])

        if "externalInput" in inputs:
            activeExternalCells = set(
                numpy.where(inputs["externalInput"] == 1)[0])
        else:
            activeExternalCells = None

        if "topDownIn" in inputs:
            activeApicalCells = set(numpy.where(inputs["topDownIn"] == 1)[0])
        else:
            activeApicalCells = None

        # Figure out if our class is one of the "general types"
        args = getArgumentDescriptions(self._tm.compute)
        if len(args) > 3:
            # Extended temporal memory
            self._tm.compute(
                activeColumns,
                activeExternalCells=activeExternalCells,
                activeApicalCells=activeApicalCells,
                formInternalConnections=self.formInternalConnections,
                learn=self.learningMode)
        else:
            # Plain old temporal memory
            self._tm.compute(activeColumns, learn=self.learningMode)

        # Normal temporal memory doesn't compute predictedActiveCells so we
        # always compute it explicitly
        self.activeState[:] = 0
        self.activeState[self._tm.getActiveCells()] = 1
        predictedActiveCells = self.activeState * self.previouslyPredictedCells

        self.previouslyPredictedCells[:] = 0
        self.previouslyPredictedCells[self._tm.getPredictiveCells()] = 1

        # Copy numpy values into the various outputs
        outputs["activeCells"][:] = self.activeState
        outputs["predictiveCells"][:] = self.previouslyPredictedCells
        outputs["predictedActiveCells"][:] = predictedActiveCells

        # Select appropriate output for bottomUpOut
        if self.defaultOutputType == "active":
            outputs["bottomUpOut"][:] = self.activeState
        elif self.defaultOutputType == "predictive":
            outputs["bottomUpOut"][:] = self.previouslyPredictedCells
        elif self.defaultOutputType == "predictedActiveCells":
            outputs["bottomUpOut"][:] = predictedActiveCells
        else:
            raise Exception("Unknown outputType: " + self.defaultOutputType)

        # Handle reset after current input has been processed
        if "resetIn" in inputs:
            assert len(inputs["resetIn"]) == 1
            if inputs["resetIn"][0] != 0:
                self.reset()
Example #11
    def compute(self, inputs, outputs):
        """
    Run one iteration of TM's compute.

    Note that if the reset signal is True (1) we assume this iteration
    represents the *end* of a sequence. The output will contain the TM
    representation to this point and any history will then be reset. The output
    at the next compute will start fresh, presumably with bursting columns.
    """

        activeColumns = set(numpy.where(inputs["bottomUpIn"] == 1)[0])

        if "externalInput" in inputs:
            activeExternalCells = set(
                numpy.where(inputs["externalInput"] == 1)[0])
        else:
            activeExternalCells = None

        if "topDownIn" in inputs:
            activeApicalCells = set(numpy.where(inputs["topDownIn"] == 1)[0])
        else:
            activeApicalCells = None

        # Figure out if our class is one of the "general types"
        args = getArgumentDescriptions(self._tm.compute)
        if len(args) > 3:
            # General temporal memory
            self._tm.compute(
                activeColumns,
                activeExternalCells=activeExternalCells,
                activeApicalCells=activeApicalCells,
                formInternalConnections=self.formInternalConnections,
                learn=self.learningMode)
            predictedActiveCells = self._tm.predictedActiveCells
        else:
            # Plain old temporal memory
            self._tm.compute(activeColumns, learn=self.learningMode)
            # Normal temporal memory doesn't compute predictedActiveCells
            predictedActiveCells = self._tm.activeCells & self.previouslyPredictedCells
            self.previouslyPredictedCells = self._tm.predictiveCells

        # Set the various outputs

        # HACK HACK: temporary until accessors are in place.
        activeCells = list(self._tm.activeCells)
        if isinstance(activeCells[0], ConnectionsCell):
            activeCells = self._tm.getCellIndices(self._tm.activeCells)
            predictiveCells = self._tm.getCellIndices(self._tm.predictiveCells)
            predictedActiveCells = self._tm.getCellIndices(
                predictedActiveCells)
        else:
            predictiveCells = list(self._tm.predictiveCells)
            predictedActiveCells = list(predictedActiveCells)

        outputs['bottomUpOut'][:] = 0
        outputs['bottomUpOut'][activeCells] = 1

        outputs['predictiveCells'][:] = 0
        outputs['predictiveCells'][predictiveCells] = 1

        outputs['predictedActiveCells'][:] = 0
        outputs['predictedActiveCells'][predictedActiveCells] = 1

        # Handle reset after current input has been processed
        if 'resetIn' in inputs:
            assert len(inputs['resetIn']) == 1
            if inputs['resetIn'][0] != 0:
                self.reset()
Example #12
  def compute(self, inputs, outputs):
    """
    Run one iteration of TM's compute.

    Note that if the reset signal is True (1) we assume this iteration
    represents the *end* of a sequence. The output will contain the TM
    representation to this point and any history will then be reset. The output
    at the next compute will start fresh, presumably with bursting columns.
    """

    activeColumns = set(numpy.where(inputs["bottomUpIn"] == 1)[0])

    if "externalInput" in inputs:
      activeExternalCells = set(numpy.where(inputs["externalInput"] == 1)[0])
    else:
      activeExternalCells = None

    if "topDownIn" in inputs:
      activeApicalCells = set(numpy.where(inputs["topDownIn"] == 1)[0])
    else:
      activeApicalCells = None

    # Figure out if our class is one of the "general types"
    args = getArgumentDescriptions(self._tm.compute)
    if len(args) > 3:
      # General temporal memory
      self._tm.compute(activeColumns,
                       activeExternalCells=activeExternalCells,
                       activeApicalCells=activeApicalCells,
                       formInternalConnections=self.formInternalConnections,
                       learn=self.learningMode)
      predictedActiveCells = self._tm.predictedActiveCells
    else:
      # Plain old temporal memory
      self._tm.compute(activeColumns, learn=self.learningMode)
      # Normal temporal memory doesn't compute predictedActiveCells
      predictedActiveCells = self._tm.activeCells & self.previouslyPredictedCells
      self.previouslyPredictedCells = self._tm.predictiveCells


    # Set the various outputs

    # HACK HACK: temporary until accessors are in place.
    activeCells = list(self._tm.activeCells)
    if isinstance(activeCells[0], ConnectionsCell):
      activeCells = self._tm.getCellIndices(self._tm.activeCells)
      predictiveCells = self._tm.getCellIndices(self._tm.predictiveCells)
      predictedActiveCells = self._tm.getCellIndices(predictedActiveCells)
    else:
      predictiveCells = list(self._tm.predictiveCells)
      predictedActiveCells = list(predictedActiveCells)

    outputs['bottomUpOut'][:] = 0
    outputs['bottomUpOut'][activeCells] = 1

    outputs['predictiveCells'][:] = 0
    outputs['predictiveCells'][predictiveCells] = 1

    outputs['predictedActiveCells'][:] = 0
    outputs['predictedActiveCells'][predictedActiveCells] = 1


    # Handle reset after current input has been processed
    if 'resetIn' in inputs:
      assert len(inputs['resetIn']) == 1
      if inputs['resetIn'][0] != 0:
        self.reset()
Example #13
    def compute(self, inputs, outputs):
        """
    Run one iteration of TM's compute.

    Note that if the reset signal is True (1) we assume this iteration
    represents the *end* of a sequence. The output will contain the TM
    representation to this point and any history will then be reset. The output
    at the next compute will start fresh, presumably with bursting columns.
    """

        activeColumns = inputs["bottomUpIn"].nonzero()[0]

        if "externalInput" in inputs:
            activeExternalCells = inputs["externalInput"].nonzero()[0]
        else:
            activeExternalCells = ()

        if "topDownIn" in inputs:
            activeApicalCells = inputs["topDownIn"].nonzero()[0]
        else:
            activeApicalCells = ()

        # Figure out if our class is one of the "extended types"
        args = getArgumentDescriptions(self._tm.compute)
        if len(args) > 3:
            # Extended temporal memory
            self._tm.compute(
                activeColumns,
                activeCellsExternalBasal=activeExternalCells,
                activeCellsExternalApical=activeApicalCells,
                reinforceCandidatesExternalBasal=self.prevActiveExternalCells,
                reinforceCandidatesExternalApical=self.prevActiveApicalCells,
                growthCandidatesExternalBasal=self.prevActiveExternalCells,
                growthCandidatesExternalApical=self.prevActiveApicalCells,
                learn=self.learningMode,
            )
            self.prevActiveExternalCells = activeExternalCells
            self.prevActiveApicalCells = activeApicalCells
        else:
            # Plain old temporal memory
            self._tm.compute(activeColumns, learn=self.learningMode)

        # Extract the active / predictive cells and put them into binary arrays.
        outputs["activeCells"][:] = 0
        outputs["activeCells"][self._tm.getActiveCells()] = 1
        outputs["predictedCells"][:] = 0
        outputs["predictedCells"][self.prevPredictiveCells] = 1
        outputs["predictedActiveCells"][:] = outputs["activeCells"] * outputs["predictedActiveCells"]

        predictiveCells = self._tm.getPredictiveCells()
        outputs["predictiveCells"][:] = 0
        outputs["predictiveCells"][predictiveCells] = 0
        self.prevPredictiveCells = predictiveCells

        # Select appropriate output for bottomUpOut
        if self.defaultOutputType == "active":
            outputs["bottomUpOut"][:] = outputs["activeCells"]
        elif self.defaultOutputType == "predictive":
            outputs["bottomUpOut"][:] = outputs["predictiveCells"]
        elif self.defaultOutputType == "predictedActiveCells":
            outputs["bottomUpOut"][:] = outputs["predictedActiveCells"]
        else:
            raise Exception("Unknown outputType: " + self.defaultOutputType)

        # Handle reset after current input has been processed
        if "resetIn" in inputs:
            assert len(inputs["resetIn"]) == 1
            if inputs["resetIn"][0] != 0:
                self.reset()