class InferenceType(Enum("TemporalNextStep",
                         "TemporalClassification",
                         "NontemporalClassification",
                         "TemporalAnomaly",
                         "NontemporalAnomaly",
                         "TemporalMultiStep",
                         "NontemporalMultiStep")):
  """ Enum of the inference types a model may be configured for. """

  # Cache of the "temporal" members; built lazily by isTemporal().
  __temporalInferenceTypes = None

  @staticmethod
  def isTemporal(inferenceType):
    """ Returns True if the inference type is 'temporal', i.e. requires a
    temporal pooler in the network.
    """
    cache = InferenceType.__temporalInferenceTypes
    if cache is None:
      cache = {InferenceType.TemporalNextStep,
               InferenceType.TemporalClassification,
               InferenceType.TemporalAnomaly,
               InferenceType.TemporalMultiStep,
               # NontemporalMultiStep is deliberately grouped with the
               # temporal types here, matching the original membership.
               InferenceType.NontemporalMultiStep}
      InferenceType.__temporalInferenceTypes = cache
    return inferenceType in cache
# --- Example #2 ---
class InferenceType(
        Enum("TemporalNextStep", "TemporalClassification",
             "NontemporalClassification", "TemporalAnomaly",
             "NontemporalAnomaly", "TemporalMultiStep",
             "NontemporalMultiStep")):
    """
  Enum of the supported inference types:

  - ``TemporalNextStep``
  - ``TemporalClassification``
  - ``NontemporalClassification``
  - ``TemporalAnomaly``
  - ``NontemporalAnomaly``
  - ``TemporalMultiStep``
  - ``NontemporalMultiStep``
  """

    # Lazily-built cache of the members considered "temporal".
    __temporalInferenceTypes = None

    @staticmethod
    def isTemporal(inferenceType):
        """ Returns True if the inference type is 'temporal', i.e. requires a
    temporal memory in the network.
    """
        temporalTypes = InferenceType.__temporalInferenceTypes
        if temporalTypes is None:
            temporalTypes = {InferenceType.TemporalNextStep,
                             InferenceType.TemporalClassification,
                             InferenceType.TemporalAnomaly,
                             InferenceType.TemporalMultiStep,
                             # NontemporalMultiStep is intentionally part of
                             # this set, matching the original membership.
                             InferenceType.NontemporalMultiStep}
            InferenceType.__temporalInferenceTypes = temporalTypes
        return inferenceType in temporalTypes
# --- Example #3 ---
class InferenceElement(
        Enum(
            prediction="prediction",
            encodings="encodings",
            classification="classification",
            anomalyScore="anomalyScore",
            anomalyLabel="anomalyLabel",
            classConfidences="classConfidences",
            multiStepPredictions="multiStepPredictions",
            multiStepBestPredictions="multiStepBestPredictions",
            multiStepBucketLikelihoods="multiStepBucketLikelihoods",
            multiStepBucketValues="multiStepBucketValues",
        )):
    """
    Enum of the separate parts ("elements") that a model's inference output
    dictionary may contain.  Each element is handled independently by the OPF.
    """

    # Maps an inference element to the sensor input element it should be
    # compared against (used for metrics and prediction logging).  Elements
    # absent from this map have no corresponding input element.
    __inferenceInputMap = {
        "prediction": "dataRow",
        "encodings": "dataEncodings",
        "classification": "category",
        "classConfidences": "category",
        "multiStepPredictions": "dataDict",
        "multiStepBestPredictions": "dataDict",
    }

    # Lazily-initialized set of elements whose inference predicts the input
    # for the NEXT timestep (see isTemporal()).
    __temporalInferenceElements = None

    @staticmethod
    def getInputElement(inferenceElement):
        """ Get the sensor input element that corresponds to the given inference
    element, or None if there is none. This is mainly used for metrics and
    prediction logging
    """
        return InferenceElement.__inferenceInputMap.get(inferenceElement, None)

    @staticmethod
    def isTemporal(inferenceElement):
        """ Returns True if the inference from this timestep predicts the input
    for the NEXT timestep.

    NOTE: This should only be checked IF THE MODEL'S INFERENCE TYPE IS ALSO
    TEMPORAL. That is, a temporal model CAN have non-temporal inference elements,
    but a non-temporal model CANNOT have temporal inference elements
    """
        if InferenceElement.__temporalInferenceElements is None:
            InferenceElement.__temporalInferenceElements = \
                                      set([InferenceElement.prediction])

        return inferenceElement in InferenceElement.__temporalInferenceElements

    @staticmethod
    def getTemporalDelay(inferenceElement, key=None):
        """ Returns the number of records that elapse between when an inference is
    made and when the corresponding input record will appear. For example, a
    multistep prediction for 3 timesteps out will have a delay of 3


    Parameters:
    -----------------------------------------------------------------------

    inferenceElement:   The InferenceElement value being delayed
    key:                If the inference is a dictionary type, this specifies
                        key for the sub-inference that is being delayed
    """
        # -----------------------------------------------------------------------
        # For next step prediction, we shift by 1
        if inferenceElement in (InferenceElement.prediction,
                                InferenceElement.encodings):
            return 1
        # -----------------------------------------------------------------------
        # For classification, anomaly scores, the inferences immediately succeed the
        # inputs
        if inferenceElement in (InferenceElement.anomalyScore,
                                InferenceElement.anomalyLabel,
                                InferenceElement.classification,
                                InferenceElement.classConfidences):
            return 0
        # -----------------------------------------------------------------------
        # For multistep inferences, the delay is based on the key in the
        # inference dictionary.  Bucket likelihoods are keyed by step count
        # just like the predictions, so they share the key-based delay.
        if inferenceElement in (InferenceElement.multiStepPredictions,
                                InferenceElement.multiStepBestPredictions,
                                InferenceElement.multiStepBucketLikelihoods):
            return int(key)

        # -----------------------------------------------------------------------
        # default: return 0
        return 0

    @staticmethod
    def getMaxDelay(inferences):
        """
    Returns the maximum delay for the InferenceElements in the inference
    dictionary

    Parameters:
    -----------------------------------------------------------------------
    inferences:   A dictionary where the keys are InferenceElements
    """
        maxDelay = 0
        for inferenceElement, inference in inferences.iteritems():
            if isinstance(inference, dict):
                # Key-delayed inference: every sub-key contributes a delay.
                for key in inference.iterkeys():
                    maxDelay = max(
                        InferenceElement.getTemporalDelay(
                            inferenceElement, key), maxDelay)
            else:
                maxDelay = max(
                    InferenceElement.getTemporalDelay(inferenceElement),
                    maxDelay)

        return maxDelay
# --- Example #4 ---
import os
from abc import ABCMeta, abstractmethod
import types

from pkg_resources import resource_filename

from nupic.frameworks.opf.opf_utils import validateOpfJsonValue
from nupic.frameworks.opf.opf_task_driver import (
    IterationPhaseSpecInferOnly, IterationPhaseSpecLearnAndInfer)
from nupic.support.enum import Enum

# URL scheme prefix for local-filesystem paths (usage not shown in this chunk).
FILE_SCHEME = "file://"

# Enum to characterize potential generation environments
OpfEnvironment = Enum(Nupic='nupic', Experiment='opfExperiment')


class DescriptionIface(with_metaclass(ABCMeta, object)):
    """
  This is the base interface class for description API classes which provide
  OPF configuration parameters.

  This mechanism abstracts description API from the specific description objects
  created by the individual users.

  TODO: logging interface?

  :param modelConfig: (dict)
      Holds user-defined settings for model creation.  See OPF
      `here <description-template.html>`_ for config dict documentation.
# --- Example #5 ---
from abc import ABCMeta, abstractmethod
import logging
import types
import validictory

from nupic.frameworks.opf.opfutils import (validateOpfJsonValue)
from nupic.frameworks.opf.opftaskdriver import (IterationPhaseSpecLearnOnly,
                                                IterationPhaseSpecInferOnly,
                                                IterationPhaseSpecLearnAndInfer
                                                )
from nupic.support.enum import Enum

###############################################################################
# Enum to characterize potential generation environments
# NOTE(review): this older snippet names the first member Grok='grok', whereas
# the later snippet in this file uses Nupic='nupic' — confirm which is current.
OpfEnvironment = Enum(Grok='grok', Experiment='opfExperiment')


###############################################################################
class DescriptionIface(object):
    """ This is the base interface class for description API classes which provide
  OPF configuration parameters.

  This mechanism abstracts description API from the specific description objects
  created by the individiual users.

  TODO: logging interface?
  """
    __metaclass__ = ABCMeta

    @abstractmethod
# --- Example #6 ---
class InferenceElement(
        Enum(
            prediction="prediction",
            encodings="encodings",
            classification="classification",
            anomalyScore="anomalyScore",
            anomalyLabel="anomalyLabel",
            classConfidences="classConfidences",
            multiStepPredictions="multiStepPredictions",
            multiStepBestPredictions="multiStepBestPredictions",
            multiStepBucketLikelihoods="multiStepBucketLikelihoods",
            multiStepBucketValues="multiStepBucketValues",
        )):
    """
  Enumeration of the distinct parts an OPF model inference may contain.

  A single model may emit several kinds of output at once (for example a
  prediction plus an anomaly score).  Models return their inferences as a
  dictionary keyed by :class:`.InferenceElement`; each entry is a separate
  inference element and is handled independently by the OPF.
  """

    # Sensor input element each inference element is compared against
    # (metrics / prediction logging).  Missing entries map to None.
    __inferenceInputMap = {
        "prediction": "dataRow",
        "encodings": "dataEncodings",
        "classification": "category",
        "classConfidences": "category",
        "multiStepPredictions": "dataDict",
        "multiStepBestPredictions": "dataDict",
    }

    # Built lazily by isTemporal().
    __temporalInferenceElements = None

    @staticmethod
    def getInputElement(inferenceElement):
        """
    Get the sensor input element that corresponds to the given inference
    element. This is mainly used for metrics and prediction logging

    :param inferenceElement: (:class:`.InferenceElement`)
    :return: (string) name of sensor input element
    """
        inputMap = InferenceElement.__inferenceInputMap
        return inputMap.get(inferenceElement, None)

    @staticmethod
    def isTemporal(inferenceElement):
        """
    .. note:: This should only be checked IF THE MODEL'S INFERENCE TYPE IS ALSO
       TEMPORAL. That is, a temporal model CAN have non-temporal inference
       elements, but a non-temporal model CANNOT have temporal inference
       elements.

    :param inferenceElement: (:class:`.InferenceElement`)
    :return: (bool) ``True`` if the inference from this time step is predicted
             the input for the NEXT time step.
    """
        temporalSet = InferenceElement.__temporalInferenceElements
        if temporalSet is None:
            temporalSet = set([InferenceElement.prediction])
            InferenceElement.__temporalInferenceElements = temporalSet
        return inferenceElement in temporalSet

    @staticmethod
    def getTemporalDelay(inferenceElement, key=None):
        """
    :param inferenceElement: (:class:`.InferenceElement`) value being delayed
    :param key: (string) If the inference is a dictionary type, this specifies
                key for the sub-inference that is being delayed.
    :return: (int) the number of records that elapse between when an inference
             is made and when the corresponding input record will appear. For
             example, a multistep prediction for 3 timesteps out will have a
             delay of 3.
    """
        shiftedByOne = (InferenceElement.prediction,
                        InferenceElement.encodings)
        keyedByStep = (InferenceElement.multiStepPredictions,
                       InferenceElement.multiStepBestPredictions,
                       InferenceElement.multiStepBucketLikelihoods)

        # Next-step outputs line up with the following record.
        if inferenceElement in shiftedByOne:
            return 1
        # Multistep outputs are delayed by the step count encoded in the
        # inference-dictionary key.
        if inferenceElement in keyedByStep:
            return int(key)
        # Classification, anomaly scores, and everything else immediately
        # succeed their inputs: delay 0.
        return 0

    @staticmethod
    def getMaxDelay(inferences):
        """
    :param inferences: (dict) where the keys are :class:`.InferenceElement`s
    :return: (int) the maximum delay for the :class:`.InferenceElement`s in
             the inference dictionary
    """
        maxDelay = 0
        for element, inference in inferences.iteritems():
            if isinstance(inference, dict):
                # Dictionary inferences: each sub-key contributes its own delay.
                for subKey in inference.iterkeys():
                    delay = InferenceElement.getTemporalDelay(element, subKey)
                    maxDelay = max(delay, maxDelay)
            else:
                maxDelay = max(InferenceElement.getTemporalDelay(element),
                               maxDelay)
        return maxDelay