Example #1

import random
import copy
import numpy
from nupic.bindings.algorithms import SpatialPooler
# Uncomment below line to use python SP
# from nupic.algorithms.spatial_pooler import SpatialPooler
from nupic.bindings.math import GetNTAReal
from htmresearch.frameworks.union_temporal_pooling.activation.excite_functions.excite_functions_all import (
    LogisticExciteFunction, FixedExciteFunction)

from htmresearch.frameworks.union_temporal_pooling.activation.decay_functions.decay_functions_all import (
    ExponentialDecayFunction, NoDecayFunction)

REAL_DTYPE = GetNTAReal()
UINT_DTYPE = "uint32"
_TIE_BREAKER_FACTOR = 0.000001


class UnionTemporalPooler(SpatialPooler):
    """
    Experimental Union Temporal Pooler Python implementation. The Union Temporal
    Pooler builds a "union SDR" of the most recent sets of active columns. It is
    driven by active-cell input and, more strongly, by predictive-active cell
    input. The latter is more likely to produce active columns. Such winning
    columns will also tend to persist longer in the union SDR.
    """
    def __init__(
            self,
            # union_temporal_pooler.py parameters
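The docstring above describes the pooling rule informally: active input excites columns, predicted-active input excites them more strongly, and the excitation decays slowly, so strongly driven columns persist in the union SDR. Below is a toy numpy sketch of that persistence mechanism; it is not the actual UnionTemporalPooler (whose constructor is truncated above), and all names and constants are illustrative.

import numpy

def unionPoolStep(poolingActivation, activeColumns, predictedActiveColumns,
                  decay=0.1, exciteActive=0.5, excitePredicted=1.0, unionSize=40):
  """One illustrative step of persistence-based pooling (toy version)."""
  poolingActivation *= (1.0 - decay)                # slow exponential decay
  poolingActivation[activeColumns] += exciteActive  # drive from active-cell input
  poolingActivation[predictedActiveColumns] += excitePredicted  # stronger drive from predicted-active input
  # The union SDR is the set of most persistently excited columns.
  return numpy.argsort(poolingActivation)[-unionSize:]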

Example #2

def trainNetwork(net, dataPath="mnist/small_training",
                 networkFile="mnist_net.nta"):
  # Some stuff we will need later
  sensor = net.regions['sensor']
  imSensor = sensor.getSelf()
  sp = net.regions["SP"]
  pysp = sp.getSelf()
  classifier = net.regions['classifier']
  dutyCycles = numpy.zeros(DEFAULT_SP_PARAMS['columnCount'], dtype=GetNTAReal())

  # # Plot untrained permanences
  # plotPermanences(network = net)

  print "============= Loading training images ================="
  t1 = time.time()
  sensor.executeCommand(["loadMultipleImages", dataPath])
  numTrainingImages = sensor.getParameter('numImages')
  start = time.time()
  print "Load time for training images:",start-t1
  print "Number of training images",numTrainingImages

  # First train just the SP
  print "============= SP training ================="
  imSensor.setParameter('explorer',0, ['RandomFlash', {'seed':0}])
  classifier.setParameter('inferenceMode', 0)
  classifier.setParameter('learningMode', 0)
  sp.setParameter('learningMode', 1)
  sp.setParameter('inferenceMode', 1)
  nTrainingIterations = 1*numTrainingImages
  for i in range(nTrainingIterations):
    net.run(1)
    dutyCycles += pysp._spatialPoolerOutput
    if i % (nTrainingIterations / 10) == 0:
      print "Iteration", i, "Category:", sensor.getOutputData('categoryOut')

  # Now train just the classifier sequentially on all training images
  print "============= Classifier training ================="
  sensor.setParameter('explorer','Flash')
  classifier.setParameter('inferenceMode', 0)
  classifier.setParameter('learningMode', 1)
  sp.setParameter('learningMode', 0)
  sp.setParameter('inferenceMode', 1)
  for i in range(numTrainingImages):
    net.run(1)
    if i % (numTrainingImages / 10) == 0:
      print "Iteration", i, "Category:", sensor.getOutputData('categoryOut')

  # Save the trained network
  net.save(networkFile)

  # Print various statistics
  print "============= Training statistics ================="
  print "Training time:",time.time() - start
  tenPct= nTrainingIterations/10
  print "My duty cycles:",fdrutilities.numpyStr(dutyCycles, format="%g")
  print "Number of nonzero duty cycles:",len(dutyCycles.nonzero()[0])
  print "Mean/Max duty cycles:",dutyCycles.mean(), dutyCycles.max()
  print "Number of columns that won for > 10% patterns",\
            (dutyCycles>tenPct).sum()
  print "Number of columns that won for > 20% patterns",\
            (dutyCycles>2*tenPct).sum()
  print "Num categories learned",classifier.getParameter('categoryCount')
  print "Number of patterns stored",classifier.getParameter('patternCount')

  return net
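
All of these snippets call GetNTAReal() to obtain the floating-point dtype NuPIC was built with (typically "float32") and hand it to numpy, as the dutyCycles buffer above does. A minimal standalone illustration, assuming a working nupic install:

import numpy
from nupic.bindings.math import GetNTAReal

realDType = GetNTAReal()                  # dtype name NuPIC was compiled with
buf = numpy.zeros(1024, dtype=realDType)  # e.g. a per-column duty-cycle buffer
print "NuPIC real dtype:", realDType, "numpy dtype:", buf.dtype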
Example #3
    TemporalMemoryInspectMixin)

from sensorimotor.one_d_world import OneDWorld
from sensorimotor.one_d_universe import OneDUniverse
from sensorimotor.random_one_d_agent import RandomOneDAgent
from sensorimotor.general_temporal_memory import (GeneralTemporalMemory)
"""

This program forms the simplest test of sensorimotor sequence inference
with 1D patterns. We present a sequence from a single 1D pattern. The
TM is initialized with multiple cells per column but should form a
first order representation of this sequence.

"""

realDType = GetNTAReal()


# Mixin class for TM statistics
class TMI(TemporalMemoryInspectMixin, GeneralTemporalMemory):
    pass


def feedTM(tm, length, agents, verbosity=0, learn=True):
    """Feed the given sequence to the TM instance."""
    tm.mmClearHistory()
    for agent in agents:
        tm.reset()
        if verbosity > 0:
            print "\nGenerating sequence for world:", agent.world.toString()
        sensorSequence, motorSequence, sensorimotorSequence = (

Example #4

"""This script contains helper routines for testing algorithms"""

import time
import traceback

import numpy
from nupic.bindings.algorithms import SpatialPooler as CPPSpatialPooler
from nupic.bindings.math import GetNTAReal, Random as NupicRandom
from nupic.research.spatial_pooler import SpatialPooler as PySpatialPooler


realType = GetNTAReal()
uintType = "uint32"


def getNumpyRandomGenerator(seed = None):
  """
  Return a numpy random number generator with the given seed.
  If seed is None, set it randomly based on time. Regardless, we log
  the actual seed and stack trace so that test failures are replicable.
  """
  if seed is None:
    seed = int((time.time()%10000)*10)
  print "Numpy seed set to:", seed, "called by",
  callStack = traceback.extract_stack(limit=3)
  print callStack[0][2], "line", callStack[0][1], "->", callStack[1][2]
  return numpy.random.RandomState(seed)
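
A short usage sketch of the helper above: because the seed and its call site are printed, a failing test run can be replayed by passing the logged seed back in. The variable names here are illustrative.

rng = getNumpyRandomGenerator()            # seeds from the clock and logs the seed
noise = rng.uniform(size=10)               # ordinary numpy.random.RandomState API
rngReplay = getNumpyRandomGenerator(1234)  # re-pass a logged seed to reproduce a run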
Example #5
"""
Module of statistical data structures and functions used in learning algorithms
and for analysis of HTM network inputs and outputs.
"""

import random

import numpy

from nupic.bindings.math import GetNTAReal, SparseMatrix

dtype = GetNTAReal()


def pickByDistribution(distribution, r=None):
    """
    Pick a value according to the provided distribution.

    Example:

    ::

      pickByDistribution([.2, .1])

    Returns 0 two thirds of the time and 1 one third of the time.

    :param distribution: Probability distribution. Need not be normalized.
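
The body of pickByDistribution is cut off above. The following is a minimal sketch of the behavior its docstring describes, reusing this module's numpy import and dtype alias; the function name is illustrative and the real implementation may differ.

def pickByDistributionSketch(distribution, r=None):
    # Sample an index with probability proportional to its (unnormalized) weight.
    r = numpy.random if r is None else r
    cumulative = numpy.cumsum(numpy.asarray(distribution, dtype=dtype))
    return int(numpy.searchsorted(cumulative, r.uniform(0, cumulative[-1])))

# pickByDistributionSketch([.2, .1]) returns 0 about two thirds of the time.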

Example #6

def trainNetwork(net, dataDir, networkFile="mnist_net.nta"):
    # Some stuff we will need later
    sensor = net.regions["sensor"]
    sp = net.regions["SP"]
    pysp = sp.getSelf()
    classifier = net.regions["classifier"]
    dutyCycles = numpy.zeros(DEFAULT_SP_PARAMS["columnCount"],
                             dtype=GetNTAReal())

    print "============= Loading training images ================="
    t1 = time.time()
    sensor.executeCommand(
        ["loadMultipleImages",
         os.path.join(dataDir, "training")])
    numTrainingImages = sensor.getParameter("numImages")
    start = time.time()
    print "Load time for training images:", start - t1
    print "Number of training images", numTrainingImages

    # First train just the SP
    print "============= SP training ================="
    classifier.setParameter("inferenceMode", 0)
    classifier.setParameter("learningMode", 0)
    sp.setParameter("learningMode", 1)
    sp.setParameter("inferenceMode", 0)
    nTrainingIterations = numTrainingImages
    for i in range(nTrainingIterations):
        net.run(1)
        dutyCycles += pysp._spatialPoolerOutput
        if i % (nTrainingIterations / 100) == 0:
            print "Iteration", i, "Category:", sensor.getOutputData(
                "categoryOut")

    # Now train just the classifier sequentially on all training images
    print "============= Classifier training ================="
    sensor.setParameter("explorer", yaml.dump(["Flash"]))
    classifier.setParameter("inferenceMode", 0)
    classifier.setParameter("learningMode", 1)
    sp.setParameter("learningMode", 0)
    sp.setParameter("inferenceMode", 1)
    for i in range(numTrainingImages):
        net.run(1)
        if i % (numTrainingImages / 100) == 0:
            print "Iteration", i, "Category:", sensor.getOutputData(
                "categoryOut")

    # Save the trained network
    net.save(networkFile)

    # Print various statistics
    print "============= Training statistics ================="
    print "Training time:", time.time() - start
    tenPct = nTrainingIterations / 10
    print "My duty cycles:", numpy.array_str(dutyCycles)
    print "Number of nonzero duty cycles:", len(dutyCycles.nonzero()[0])
    print "Mean/Max duty cycles:", dutyCycles.mean(), dutyCycles.max()
    print "Number of columns that won for > 10% patterns",\
              (dutyCycles>tenPct).sum()
    print "Number of columns that won for > 20% patterns",\
              (dutyCycles>2*tenPct).sum()
    print "Num categories learned", classifier.getParameter("categoryCount")
    print "Number of patterns stored", classifier.getParameter("patternCount")

    return net
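
The "> 10%" and "> 20%" statistics work because dutyCycles adds one count per iteration in which a column was active, so comparing the counts against 10% of the iteration count flags columns that won for more than 10% of the training patterns. A standalone rendering of that bookkeeping, with an illustrative random stand-in for the SP output:

import numpy

nIterations = 1000
dutyCycles = numpy.zeros(1024, dtype="float32")
for _ in xrange(nIterations):
    # Stand-in for the SP's 0/1 column output on one training pattern.
    spOutput = (numpy.random.rand(1024) < 0.02).astype("float32")
    dutyCycles += spOutput
tenPct = nIterations / 10
print "Columns that won for > 10% of patterns:", (dutyCycles > tenPct).sum()
print "Columns that won for > 20% of patterns:", (dutyCycles > 2 * tenPct).sum()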