Example #1
def createEncoder():
    consumptionEncoder = ScalarEncoder(21, 0, 1024, n=50, name="consumption")
    timeEncoder = DateEncoder(timeOfDay=(21,9.5), name="timestamp_timeOfDay")
    encoder = MultiEncoder()
    encoder.addEncoder("consumption", consumptionEncoder)
    encoder.addEncoder("timestamp", timeEncoder)
    return encoder
Example #2
def createEncoder():
  """Create the encoder instance for our test and return it."""
  consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption",
      clipInput=True)
  time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")

  encoder = MultiEncoder()
  encoder.addEncoder("consumption", consumption_encoder)
  encoder.addEncoder("timestamp", time_encoder)

  return encoder
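
For reference, a minimal usage sketch for the encoder built in Examples #1 and #2 (not part of the original snippets; the field values and Python 2 prints are illustrative assumptions): MultiEncoder.encode() takes a dict keyed by the names passed to addEncoder() and returns one concatenated binary array.

import datetime

# A minimal sketch, assuming createEncoder() from Example #2 is in scope.
encoder = createEncoder()

record = {
    "consumption": 42.0,                                 # hypothetical kWh reading
    "timestamp": datetime.datetime(2014, 1, 25, 8, 30),  # hypothetical reading time
}

sdr = encoder.encode(record)  # numpy array of 0/1 bits, length == encoder.getWidth()
print "encoder width:", encoder.getWidth()
print "active bit indices:", sdr.nonzero()[0]
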
Example #3
def create_network():
    network = Network()

    m_sensor = network.addRegion("Measurement", 'ScalarSensor',
                                 json.dumps(_SCALAR_ENCODER))
    dt_sensor = network.addRegion("DT", 'py.PluggableEncoderSensor', "")
    dt_sensor.getSelf().encoder = DateEncoder(**_DATE_ENCODER)

    # Add a SPRegion, a region containing a spatial pooler
    scalar_n = m_sensor.getParameter('n')
    dt_n = dt_sensor.getSelf().encoder.getWidth()
    _SP_PARAMS["inputWidth"] = scalar_n + dt_n
    network.addRegion("sp", "py.SPRegion", json.dumps(_SP_PARAMS))

    # Input to the Spatial Pooler
    network.link("Measurement", "sp", "UniformLink", "")
    network.link("DT", "sp", "UniformLink", "")

    # Add a TPRegion, a region containing a Temporal Memory
    network.addRegion("tm", "py.TMRegion", json.dumps(_TM_PARAMS))

    # Set up links
    network.link("sp", "tm", "UniformLink", "")
    network.link("tm",
                 "sp",
                 "UniformLink",
                 "",
                 srcOutput="topDownOut",
                 destInput="topDownIn")

    network.regions['sp'].setParameter("learningMode", True)
    network.regions['sp'].setParameter("anomalyMode", False)

    # network.regions['tm'].setParameter("topDownMode", True)  # check this

    # Make sure learning is enabled (this is the default)
    network.regions['tm'].setParameter("learningMode", True)
    # Enable anomalyMode so the tm calculates anomaly scores
    network.regions['tm'].setParameter("anomalyMode", True)
    # Enable inference mode to be able to get predictions
    network.regions['tm'].setParameter("inferenceMode", True)

    # TODO: enable all inferences
    return network
Example #4
def createNetwork():
    network = Network()

    #
    # Sensors
    #

    # C++
    consumptionSensor = network.addRegion(
        'consumptionSensor', 'ScalarSensor',
        json.dumps({
            'n': 120,
            'w': 21,
            'minValue': 0.0,
            'maxValue': 100.0,
            'clipInput': True
        }))

    # Python
    timestampSensor = network.addRegion("timestampSensor",
                                        'py.PluggableEncoderSensor', "")
    timestampSensor.getSelf().encoder = DateEncoder(timeOfDay=(21, 9.5),
                                                    name="timestamp_timeOfDay")

    #
    # Add a SPRegion, a region containing a spatial pooler
    #
    consumptionEncoderN = consumptionSensor.getParameter('n')
    timestampEncoderN = timestampSensor.getSelf().encoder.getWidth()
    inputWidth = consumptionEncoderN + timestampEncoderN

    network.addRegion(
        "sp", "py.SPRegion",
        json.dumps({
            "spatialImp": "cpp",
            "globalInhibition": 1,
            "columnCount": 2048,
            "inputWidth": inputWidth,
            "numActiveColumnsPerInhArea": 40,
            "seed": 1956,
            "potentialPct": 0.8,
            "synPermConnected": 0.1,
            "synPermActiveInc": 0.0001,
            "synPermInactiveDec": 0.0005,
            "boostStrength": 0.0,
        }))

    #
    # Input to the Spatial Pooler
    #
    network.link("consumptionSensor", "sp", "UniformLink", "")
    network.link("timestampSensor", "sp", "UniformLink", "")

    #
    # Add a TPRegion, a region containing a Temporal Memory
    #
    network.addRegion(
        "tm", "py.TMRegion",
        json.dumps({
            "columnCount": 2048,
            "cellsPerColumn": 32,
            "inputWidth": 2048,
            "seed": 1960,
            "temporalImp": "cpp",
            "newSynapseCount": 20,
            "maxSynapsesPerSegment": 32,
            "maxSegmentsPerCell": 128,
            "initialPerm": 0.21,
            "permanenceInc": 0.1,
            "permanenceDec": 0.1,
            "globalDecay": 0.0,
            "maxAge": 0,
            "minThreshold": 9,
            "activationThreshold": 12,
            "outputType": "normal",
            "pamLength": 3,
        }))

    network.link("sp", "tm", "UniformLink", "")
    network.link("tm",
                 "sp",
                 "UniformLink",
                 "",
                 srcOutput="topDownOut",
                 destInput="topDownIn")

    # Enable anomalyMode so the tm calculates anomaly scores
    network.regions['tm'].setParameter("anomalyMode", True)
    # Enable inference mode to be able to get predictions
    network.regions['tm'].setParameter("inferenceMode", True)

    return network
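
Not part of the original example, but for orientation, a hedged driver sketch for the network above, following the pattern in NuPIC's network-API demos: the C++ ScalarSensor is fed through its 'sensedValue' parameter, the PluggableEncoderSensor through setSensedValue(), and the network is advanced one record at a time. The timestamps and consumption values below are made up.

import datetime

network = createNetwork()
network.initialize()

consumptionSensor = network.regions['consumptionSensor']
timestampSensor = network.regions['timestampSensor']
tmRegion = network.regions['tm']

# Hypothetical records; a real run would read them from a CSV or stream.
records = [
    (datetime.datetime(2014, 1, 25, 8, 30), 42.0),
    (datetime.datetime(2014, 1, 25, 8, 45), 44.5),
]

for timestamp, consumption in records:
    consumptionSensor.setParameter('sensedValue', consumption)
    timestampSensor.getSelf().setSensedValue(timestamp)
    network.run(1)
    anomalyScore = tmRegion.getOutputData('anomalyScore')[0]
    print "%s  consumption=%.1f  anomaly=%.3f" % (timestamp, consumption, anomalyScore)
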
Example #5
def createNetwork():
  # c network: create Network instance
  network = Network()

  # --------------------------------------------------
  # Add sensors to network

  # c param_f_consumptionSensor: parameter for consumptionSensor
  param_f_consumptionSensor={
    'n': 120,
    'w': 21,
    'minValue': 0.0,
    'maxValue': 100.0,
    'clipInput': True}

  # c jparam_f_cs: json param for consumptionSensor
  jparam_f_cs=json.dumps(param_f_consumptionSensor)

  # C++
  # c consumptionSensor: add consumptionSensor region into network
  consumptionSensor = network.addRegion(
    'consumptionSensor', 'ScalarSensor', jparam_f_cs)

  # --------------------------------------------------
  # Python
  # c timestampSensor: add timestampSensor region into network
  timestampSensor = network.addRegion(
    "timestampSensor",'py.PluggableEncoderSensor', "")

  # c date_encoder: create date encoder
  date_encoder=DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")

  # c date_encoder: assign date encoder to timestampSensor
  timestampSensor.getSelf().encoder = date_encoder

  # --------------------------------------------------
  # c consumptionEncoderN: get number of bits "n" from consumptionSensor
  consumptionEncoderN = consumptionSensor.getParameter('n')
  # print("consumptionEncoderN",consumptionEncoderN)
  # ('consumptionEncoderN', 120)

  # print("timestampSensor.getSelf()",timestampSensor.getSelf())
  # <nupic.regions.pluggable_encoder_sensor.PluggableEncoderSensor object at 0x7fa428bf31d0>

  # c encoder_of_tss: encoder of timestampSensor
  encoder_of_tss=timestampSensor.getSelf().encoder

  # c timestampEncoderN: width of encoder of timestampSensor
  timestampEncoderN = encoder_of_tss.getWidth()
  # print("timestampEncoderN",timestampEncoderN)
  # ('timestampEncoderN', 54)

  # c inputWidth: width of input
  inputWidth = consumptionEncoderN + timestampEncoderN
  # print("inputWidth",inputWidth)
  # ('inputWidth', 174)

  # --------------------------------------------------
  # c param_f_SP: parameter for spatial pooler
  param_f_SP={
    # c spatialImp: spatial pooler implementation in C++
    "spatialImp": "cpp",
    # c globalInhibition: 1 -> on
    "globalInhibition": 1,
    "columnCount": 2048,
    "inputWidth": inputWidth,
    # c numActiveColumnsPerInhArea: number of active columns per inhibition area
    "numActiveColumnsPerInhArea": 40,
    "seed": 1956,
    # c potentialPct: potential pool percent
    "potentialPct": 0.8,
    # c "synPermConnected: synaptic permanence connected
    "synPermConnected": 0.1,
    # c synPermActiveInc: synaptic permanence active increment
    "synPermActiveInc": 0.0001,
    # c synPermInactiveDec: synaptic permanence inactive decrement
    "synPermInactiveDec": 0.0005,
    "boostStrength": 0.0,}

  # c param_f_SP_j: parameter for spatial pooler in JSON
  param_f_SP_j=json.dumps(param_f_SP)

  # c Add "SPRegion" into network
  # SPRegion can contain spatial pooler
  network.addRegion("sp", "py.SPRegion", param_f_SP_j)

  # --------------------------------------------------
  # Link each configured one in network
  network.link("consumptionSensor", "sp", "UniformLink", "")
  network.link("timestampSensor", "sp", "UniformLink", "")

  # --------------------------------------------------
  # c param_f_TM: parameter for temporal memory learning algorithm
  param_f_TM={
    "columnCount": 2048,
    "cellsPerColumn": 32,
    "inputWidth": 2048,
    "seed": 1960,
    "temporalImp": "cpp",
    "newSynapseCount": 20,
    # c maxSynapsesPerSegment: maximum number of synapses per segment
    "maxSynapsesPerSegment": 32,
    # c maxSegmentsPerCell: maximum number of segments per cell
    "maxSegmentsPerCell": 128,
    # c initialPerm: initial permanence value for newly created synapses
    "initialPerm": 0.21,
    # c permanenceInc: active synapses get their permanence counts incremented by this value
    "permanenceInc": 0.1,
    # c permanenceDec: all other synapses get their permanence counts decremented by this value
    "permanenceDec": 0.1,
    "globalDecay": 0.0,
    "maxAge": 0,
    "minThreshold": 9,
    # c activationThreshold: if "number of active connected synapses" on segment is 
    # c activationThreshold: at least this threshold, segment is said to be active
    "activationThreshold": 12,
    "outputType": "normal",
    "pamLength": 3,}
  
  # c param_f_TM_j: parameter for temporal memory learning algorithm in JSON
  param_f_TM_j=json.dumps(param_f_TM)

  # Add TMRegion into network 
  # TMRegion is region containing "Temporal Memory Learning algorithm"
  network.addRegion("tm", "py.TMRegion", param_f_TM_j)

  # --------------------------------------------------
  network.link("sp", "tm", "UniformLink", "")
  network.link("tm", "sp", "UniformLink", "", srcOutput="topDownOut", destInput="topDownIn")

  # --------------------------------------------------
  # Enable anomalyMode so TM calculates anomaly scores
  network.regions['tm'].setParameter("anomalyMode", True)
  
  # Enable inference mode to be able to get predictions
  network.regions['tm'].setParameter("inferenceMode", True)

  return network
Example #6
def getDescription(datasets):
    encoder = MultiEncoder()
    encoder.addEncoder("date", DateEncoder(timeOfDay=3))
    encoder.addEncoder("amount", LogEncoder(name="amount", maxval=1000))
    for i in xrange(0, nRandomFields):
        s = ScalarEncoder(name="scalar",
                          minval=0,
                          maxval=randomFieldWidth,
                          resolution=1,
                          w=3)
        encoder.addEncoder("random%d" % i, s)

    dataSource = FunctionSource(
        generateFunction,
        dict(nRandomFields=nRandomFields, randomFieldWidth=randomFieldWidth))

    inputShape = (1, encoder.getWidth())

    # Layout the coincidences vertically stacked on top of each other, each
    # looking at the entire input field.
    coincidencesShape = (nCoincidences, 1)
    # TODO: why do we need input border?
    inputBorder = inputShape[1] / 2
    if inputBorder * 2 >= inputShape[1]:
        inputBorder -= 1

    nodeParams = dict()

    spParams = dict(
        commonDistributions=0,
        inputShape=inputShape,
        inputBorder=inputBorder,
        coincidencesShape=coincidencesShape,
        coincInputRadius=inputShape[1] / 2,
        coincInputPoolPct=0.75,
        gaussianDist=0,
        localAreaDensity=0.10,
        # localAreaDensity = 0.04,
        numActivePerInhArea=-1,
        dutyCyclePeriod=1000,
        stimulusThreshold=5,
        synPermInactiveDec=0.08,
        # synPermInactiveDec=0.02,
        synPermActiveInc=0.02,
        synPermActiveSharedDec=0.0,
        synPermOrphanDec=0.0,
        minPctDutyCycleBeforeInh=0.05,
        # minPctDutyCycleAfterInh = 0.1,
        # minPctDutyCycleBeforeInh = 0.05,
        minPctDutyCycleAfterInh=0.05,
        # minPctDutyCycleAfterInh = 0.4,
        seed=1,
    )

    otherParams = dict(
        disableTemporal=1,
        trainingStep='spatial',
    )

    nodeParams.update(spParams)
    nodeParams.update(otherParams)

    def mySetupCallback(experiment):
        print "Setup function called"

    description = dict(
        options=dict(logOutputsDuringInference=False, ),
        network=dict(sensorDataSource=dataSource,
                     sensorEncoder=encoder,
                     CLAType="py.CLARegion",
                     CLAParams=nodeParams,
                     classifierType=None,
                     classifierParams=None),

        # step
        spTrain=dict(
            name="phase1",
            setup=mySetupCallback,
            iterationCount=5000,
            #iter=displaySPCoincidences(100),
            finish=printSPCoincidences()),
        tpTrain=None,  # same format as spTrain if non-empty
        infer=None,  # same format as spTrain if non-empty
    )

    return description
Example #7
def createTemporalAnomaly_chemical(recordParams, spatialParams, temporalParams,
                                   verbosity):

    inputFilePath = recordParams["inputFilePath"]
    scalarEncoder1Args = recordParams["scalarEncoder1Args"]
    scalarEncoder2Args = recordParams["scalarEncoder2Args"]
    scalarEncoder3Args = recordParams["scalarEncoder3Args"]
    scalarEncoder4Args = recordParams["scalarEncoder4Args"]
    scalarEncoder5Args = recordParams["scalarEncoder5Args"]
    scalarEncoder6Args = recordParams["scalarEncoder6Args"]
    scalarEncoder7Args = recordParams["scalarEncoder7Args"]
    dateEncoderArgs = recordParams["dateEncoderArgs"]

    scalarEncoder1 = ScalarEncoder(**scalarEncoder1Args)
    scalarEncoder2 = ScalarEncoder(**scalarEncoder2Args)
    scalarEncoder3 = ScalarEncoder(**scalarEncoder3Args)
    scalarEncoder4 = ScalarEncoder(**scalarEncoder4Args)
    scalarEncoder5 = ScalarEncoder(**scalarEncoder5Args)
    scalarEncoder6 = ScalarEncoder(**scalarEncoder6Args)
    scalarEncoder7 = ScalarEncoder(**scalarEncoder7Args)
    dateEncoder = DateEncoder(**dateEncoderArgs)

    encoder = MultiEncoder()
    encoder.addEncoder(scalarEncoder1Args["name"], scalarEncoder1)
    encoder.addEncoder(scalarEncoder2Args["name"], scalarEncoder2)
    encoder.addEncoder(scalarEncoder3Args["name"], scalarEncoder3)
    encoder.addEncoder(scalarEncoder4Args["name"], scalarEncoder4)
    encoder.addEncoder(scalarEncoder5Args["name"], scalarEncoder5)
    encoder.addEncoder(scalarEncoder6Args["name"], scalarEncoder6)
    encoder.addEncoder(scalarEncoder7Args["name"], scalarEncoder7)
    encoder.addEncoder(dateEncoderArgs["name"], dateEncoder)

    network = Network()

    network.addRegion("sensor", "py.RecordSensor",
                      json.dumps({"verbosity": verbosity}))

    sensor = network.regions["sensor"].getSelf()
    sensor.encoder = encoder
    sensor.dataSource = FileRecordStream(streamID=inputFilePath)

    # Create the spatial pooler region
    spatialParams["inputWidth"] = sensor.encoder.getWidth()
    network.addRegion("spatialPoolerRegion", "py.SPRegion",
                      json.dumps(spatialParams))

    # Link the SP region to the sensor input
    network.link("sensor", "spatialPoolerRegion", "UniformLink", "")
    network.link("sensor",
                 "spatialPoolerRegion",
                 "UniformLink",
                 "",
                 srcOutput="resetOut",
                 destInput="resetIn")
    network.link("spatialPoolerRegion",
                 "sensor",
                 "UniformLink",
                 "",
                 srcOutput="spatialTopDownOut",
                 destInput="spatialTopDownIn")
    network.link("spatialPoolerRegion",
                 "sensor",
                 "UniformLink",
                 "",
                 srcOutput="temporalTopDownOut",
                 destInput="temporalTopDownIn")

    # Add the TPRegion on top of the SPRegion
    network.addRegion("temporalPoolerRegion", "py.TMRegion",
                      json.dumps(temporalParams))

    network.link("spatialPoolerRegion", "temporalPoolerRegion", "UniformLink",
                 "")
    network.link("temporalPoolerRegion",
                 "spatialPoolerRegion",
                 "UniformLink",
                 "",
                 srcOutput="topDownOut",
                 destInput="topDownIn")

    # Add the AnomalyLikelihoodRegion on top of the TMRegion
    network.addRegion("anomalyLikelihoodRegion", "py.AnomalyLikelihoodRegion",
                      json.dumps({}))
    network.link("temporalPoolerRegion",
                 "anomalyLikelihoodRegion",
                 "UniformLink",
                 "",
                 srcOutput="anomalyScore",
                 destInput="rawAnomalyScore")
    network.link("sensor",
                 "anomalyLikelihoodRegion",
                 "UniformLink",
                 "",
                 srcOutput="sourceOut",
                 destInput="metricValue")

    spatialPoolerRegion = network.regions["spatialPoolerRegion"]

    # Make sure learning is enabled
    spatialPoolerRegion.setParameter("learningMode", True)
    # We want temporal anomalies so disable anomalyMode in the SP. This mode is
    # used for computing anomalies in a non-temporal model.
    spatialPoolerRegion.setParameter("anomalyMode", False)

    temporalPoolerRegion = network.regions["temporalPoolerRegion"]

    # Enable topDownMode to get the predicted columns output
    temporalPoolerRegion.setParameter("topDownMode", True)
    # Make sure learning is enabled (this is the default)
    temporalPoolerRegion.setParameter("learningMode", True)
    # Enable inference mode so we get predictions
    temporalPoolerRegion.setParameter("inferenceMode", True)
    # Enable anomalyMode to compute the anomaly score.
    temporalPoolerRegion.setParameter("anomalyMode", True)

    return network
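
A short, hedged follow-up to the chemical example above (not in the original code): once the network has been initialized and has consumed at least one record, the raw and likelihood-smoothed scores should be readable from the region outputs wired up by the links. The output name "anomalyLikelihood" is taken from the AnomalyLikelihoodRegion spec and should be treated as an assumption here.

# Sketch only: assumes `network` came from createTemporalAnomaly_chemical(...),
# network.initialize() was called, and at least one record was run via network.run(1).
rawScore = network.regions["temporalPoolerRegion"].getOutputData("anomalyScore")[0]
likelihood = network.regions["anomalyLikelihoodRegion"].getOutputData("anomalyLikelihood")[0]
print "raw anomaly=%.3f  anomaly likelihood=%.3f" % (rawScore, likelihood)
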
Example #8
def createTemporalAnomaly(recordParams,
                          spatialParams=_SP_PARAMS,
                          temporalParams=_TP_PARAMS,
                          verbosity=_VERBOSITY):
    """Generates a Network with connected RecordSensor, SP, TP.

  This function takes care of generating regions and the canonical links.
  The network has a sensor region reading data from a specified input and
  passing the encoded representation to an SPRegion.
  The SPRegion output is passed to a TPRegion.

  Note: this function returns a network that needs to be initialized. This
  allows the user to extend the network by adding further regions and
  connections.

  :param recordParams: a dict with parameters for creating RecordSensor region.
  :param spatialParams: a dict with parameters for creating SPRegion.
  :param temporalParams: a dict with parameters for creating TPRegion.
  :param verbosity: an integer representing how chatty the network will be.
  """
    inputFilePath = recordParams["inputFilePath"]
    scalarEncoderArgs = recordParams["scalarEncoderArgs"]
    dateEncoderArgs = recordParams["dateEncoderArgs"]

    scalarEncoder = ScalarEncoder(**scalarEncoderArgs)
    dateEncoder = DateEncoder(**dateEncoderArgs)

    encoder = MultiEncoder()
    encoder.addEncoder(scalarEncoderArgs["name"], scalarEncoder)
    encoder.addEncoder(dateEncoderArgs["name"], dateEncoder)

    network = Network()

    network.addRegion("sensor", "py.RecordSensor",
                      json.dumps({"verbosity": verbosity}))

    sensor = network.regions["sensor"].getSelf()
    sensor.encoder = encoder
    sensor.dataSource = FileRecordStream(streamID=inputFilePath)

    # Create the spatial pooler region
    spatialParams["inputWidth"] = sensor.encoder.getWidth()
    network.addRegion("spatialPoolerRegion", "py.SPRegion",
                      json.dumps(spatialParams))

    # Link the SP region to the sensor input
    network.link("sensor", "spatialPoolerRegion", "UniformLink", "")
    network.link("sensor",
                 "spatialPoolerRegion",
                 "UniformLink",
                 "",
                 srcOutput="resetOut",
                 destInput="resetIn")
    network.link("spatialPoolerRegion",
                 "sensor",
                 "UniformLink",
                 "",
                 srcOutput="spatialTopDownOut",
                 destInput="spatialTopDownIn")
    network.link("spatialPoolerRegion",
                 "sensor",
                 "UniformLink",
                 "",
                 srcOutput="temporalTopDownOut",
                 destInput="temporalTopDownIn")

    # Add the TPRegion on top of the SPRegion
    network.addRegion("temporalPoolerRegion", "py.TPRegion",
                      json.dumps(temporalParams))

    network.link("spatialPoolerRegion", "temporalPoolerRegion", "UniformLink",
                 "")
    network.link("temporalPoolerRegion",
                 "spatialPoolerRegion",
                 "UniformLink",
                 "",
                 srcOutput="topDownOut",
                 destInput="topDownIn")

    spatialPoolerRegion = network.regions["spatialPoolerRegion"]

    # Make sure learning is enabled
    spatialPoolerRegion.setParameter("learningMode", True)
    # We want temporal anomalies so disable anomalyMode in the SP. This mode is
    # used for computing anomalies in a non-temporal model.
    spatialPoolerRegion.setParameter("anomalyMode", False)

    temporalPoolerRegion = network.regions["temporalPoolerRegion"]

    # Enable topDownMode to get the predicted columns output
    temporalPoolerRegion.setParameter("topDownMode", True)
    # Make sure learning is enabled (this is the default)
    temporalPoolerRegion.setParameter("learningMode", True)
    # Enable inference mode so we get predictions
    temporalPoolerRegion.setParameter("inferenceMode", True)
    # Enable anomalyMode to compute the anomaly score.
    temporalPoolerRegion.setParameter("anomalyMode", True)

    return network
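
To close out Example #8, a minimal driver sketch under stated assumptions: the recordParams dict below reuses the encoder settings from Example #2, the CSV path is a placeholder, and the _SP_PARAMS, _TP_PARAMS and _VERBOSITY defaults are assumed to be defined elsewhere in the module, as the function signature suggests.

# A minimal sketch, not part of the original example.
recordParams = {
    "inputFilePath": "rec-center-hourly.csv",  # placeholder CSV in FileRecordStream format
    "scalarEncoderArgs": {"name": "consumption", "w": 21, "n": 50,
                          "minval": 0.0, "maxval": 100.0, "clipInput": True},
    "dateEncoderArgs": {"name": "timestamp_timeOfDay", "timeOfDay": (21, 9.5)},
}

network = createTemporalAnomaly(recordParams)
network.initialize()  # the docstring notes the caller is expected to initialize

temporalPoolerRegion = network.regions["temporalPoolerRegion"]
for _ in xrange(100):  # number of records to run is arbitrary here
    network.run(1)
    print "anomaly score:", temporalPoolerRegion.getOutputData("anomalyScore")[0]
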