Example #1
  def reset(self, params, repetition):
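    # Per-repetition setup: seed the RNG, pick the encoder (basic or distributed)
    # and the dataset, and clear the bookkeeping lists. The network itself is not
    # built here; self.net is left as None.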
    random.seed(params['seed'])

    if params['encoding'] == 'basic':
      self.encoder = BasicEncoder(params['encoding_num'])
    elif params['encoding'] == 'distributed':
      self.encoder = DistributedEncoder(params['encoding_num'],
                                        maxValue=params['encoding_max'],
                                        minValue=params['encoding_min'],
                                        classifyWithRandom=params[
                                          'classify_with_random'])
    else:
      raise Exception("Encoder not found")

    if params['dataset'] == 'simple':
      self.dataset = SimpleDataset()
    elif params['dataset'] == 'reber':
      self.dataset = ReberDataset(maxLength=params['max_length'])
    elif params['dataset'] == 'high-order':
      self.dataset = HighOrderDataset(numPredictions=params['num_predictions'])
    else:
      raise Exception("Dataset not found")

    self.computeCounter = 0

    self.history = []
    self.resets = []
    self.randoms = []
    self.currentSequence = self.dataset.generateSequence()

    self.net = None
    self.sequenceCounter = 0
Example #2
  def reset(self, params, repetition):
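    # Per-repetition setup for the HTM (CLA) variant: build the dataset, size the
    # category encoder to cover the symbol alphabet plus a pool of random symbols,
    # create the model with ModelFactory, and reset the prediction bookkeeping.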
    random.seed(params['seed'])

    if params['dataset'] == 'simple':
      self.dataset = SimpleDataset()
    elif params['dataset'] == 'reber':
      self.dataset = ReberDataset(maxLength=params['max_length'])
    elif params['dataset'] == 'high-order':
      self.dataset = HighOrderDataset(numPredictions=params['num_predictions'],
                                      seed=params['seed'])
      print "Sequence dataset: "
      print " Symbol Number {}".format(self.dataset.numSymbols)
      for seq in self.dataset.sequences:
        print seq

    elif params['dataset'] == 'high-order-long':
      self.dataset = LongHighOrderDataset(params['sequence_length'],
                                          seed=params['seed'])
      print "Sequence dataset: "
      print " Symbol Number {}".format(self.dataset.numSymbols)
      for seq in self.dataset.sequences:
        print seq
    else:
      raise Exception("Dataset not found")

    self.randomStart = self.dataset.numSymbols + 1
    self.randomEnd = self.randomStart + 5000

    MODEL_PARAMS['modelParams']['sensorParams']['encoders']['element']\
      ['categoryList'] = range(self.randomEnd)

    # if not os.path.exists(resultsDir):
    #   os.makedirs(resultsDir)
    # self.resultsFile = open(os.path.join(resultsDir, "0.log"), 'w')
    if params['verbosity'] > 0:
      print " initializing HTM model..."
    self.model = ModelFactory.create(MODEL_PARAMS)
    self.model.enableInference({"predictedField": "element"})
    # self.classifier = SDRClassifier(steps=[1], alpha=0.001)

    self.mapping = getEncoderMapping(self.model, self.dataset.numSymbols)

    self.numPredictedActiveCells = []
    self.numPredictedInactiveCells = []
    self.numUnpredictedActiveColumns = []

    self.currentSequence = []
    self.targetPrediction = []
    self.replenish_sequence(params, iteration=0)

    self.resets = []
    self.randoms = []
    self.verbosity = 1
    self.sequenceCounter = 0
Example #3
    def reset(self, params, repetition):
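        # Per-repetition setup for the PyBrain LSTM variant: choose encoder and
        # dataset, reset the bookkeeping lists, then build an LSTM network with
        # buildNetwork() and a BackpropTrainer for online training.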
        random.seed(params['seed'])

        if params['encoding'] == 'basic':
            self.encoder = BasicEncoder(params['encoding_num'])
        elif params['encoding'] == 'distributed':
            self.encoder = DistributedEncoder(
                params['encoding_num'],
                maxValue=params['encoding_max'],
                minValue=params['encoding_min'],
                classifyWithRandom=params['classify_with_random'])
        else:
            raise Exception("Encoder not found")

        if params['dataset'] == 'simple':
            self.dataset = SimpleDataset()
        elif params['dataset'] == 'reber':
            self.dataset = ReberDataset(maxLength=params['max_length'])
        elif params['dataset'] == 'high-order':
            self.dataset = HighOrderDataset(
                numPredictions=params['num_predictions'], seed=params['seed'])
        else:
            raise Exception("Dataset not found")

        self.computeCounter = 0

        self.history = []
        self.resets = []
        self.randoms = []

        self.currentSequence = []
        self.targetPrediction = []
        self.replenishSequence(params, iteration=0)

        self.net = buildNetwork(params['encoding_num'],
                                params['num_cells'],
                                params['encoding_num'],
                                hiddenclass=LSTMLayer,
                                bias=True,
                                outputbias=params['output_bias'],
                                recurrent=True)

        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           params['encoding_num'],
                                           params['encoding_num']),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        self.sequenceCounter = 0
Example #4
  def reset(self, params, repetition):
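    # Per-repetition setup for a feedforward, time-lagged network: the input layer
    # spans num_lags concatenated encodings, so the model sees a fixed window of
    # past elements instead of keeping a recurrent state.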
    random.seed(params['seed'])

    if params['encoding'] == 'basic':
      self.encoder = BasicEncoder(params['encoding_num'])
    elif params['encoding'] == 'distributed':
      self.encoder = DistributedEncoder(params['encoding_num'],
                                        params['encoding_num_non_random'],
                                        maxValue=params['encoding_max'],
                                        minValue=params['encoding_min'])
    elif params['encoding'] == 'sparse-distributed':
      self.encoder = SparseDistributedEncoder(params['encoding_num'],
                                              params['encoding_num_non_random'],
                                              params['encoding_active_bits'])
    else:
      raise Exception("Encoder not found")

    if params['dataset'] == 'simple':
      self.dataset = SimpleDataset()
    elif params['dataset'] == 'reber':
      self.dataset = ReberDataset(maxLength=params['max_length'])
    elif params['dataset'] == 'high-order':
      self.dataset = HighOrderDataset(numPredictions=params['num_predictions'],
                                      seed=params['seed'])
    else:
      raise Exception("Dataset not found")

    self.numLags = params['num_lags']

    self.computeCounter = 0
    self.history = []
    self.resets = []


    self.finishInitializeX = False
    self.randoms = []

    self.currentSequence = []
    self.targetPrediction = []
    self.replenishSequence(params, iteration=0)

    self.net = buildNetwork(params['encoding_num'] * params['num_lags'],
                            params['num_cells'],
                            params['encoding_num'],
                            bias=True,
                            outputbias=True)
    # self.trainer = BackpropTrainer(self.net, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)
    self.sequenceCounter = 0
Example #5
    def reset(self, params, repetition):
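        # Per-repetition setup for the ELM (extreme learning machine) variant: same
        # encoder/dataset selection and lagged input window as above, but the
        # network is created by initializeELMnet().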
        random.seed(params['seed'])

        if params['encoding'] == 'basic':
            self.encoder = BasicEncoder(params['encoding_num'])
        elif params['encoding'] == 'distributed':
            self.encoder = DistributedEncoder(
                params['encoding_num'],
                params['encoding_num_non_random'],
                maxValue=params['encoding_max'],
                minValue=params['encoding_min'])
        elif params['encoding'] == 'sparse-distributed':
            self.encoder = SparseDistributedEncoder(
                params['encoding_num'], params['encoding_num_non_random'],
                params['encoding_active_bits'])
        else:
            raise Exception("Encoder not found")

        if params['dataset'] == 'simple':
            self.dataset = SimpleDataset()
        elif params['dataset'] == 'reber':
            self.dataset = ReberDataset(maxLength=params['max_length'])
        elif params['dataset'] == 'high-order':
            self.dataset = HighOrderDataset(
                numPredictions=params['num_predictions'], seed=params['seed'])
        else:
            raise Exception("Dataset not found")

        self.numLags = params['num_lags']

        self.history = []
        self.resets = []

        self.finishInitializeX = False
        self.randoms = []

        self.currentSequence = []
        self.targetPrediction = []
        self.replenishSequence(params, iteration=0)

        self.net = initializeELMnet(params['encoding_num'] *
                                    params['num_lags'],
                                    params['encoding_num'],
                                    numNeurons=params['num_cells'])
        self.sequenceCounter = 0
Example #6
    def reset(self, params, repetition):
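        # Per-repetition setup for the HTM fault-tolerance experiment: build the
        # model as in the other HTM variant and, when kill_cell_percent > 0, swap
        # the temporal memory for MonitoredFaultyTPShim so that faults can be
        # injected during the run.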
        random.seed(params['seed'])

        if params['dataset'] == 'simple':
            self.dataset = SimpleDataset()
        elif params['dataset'] == 'reber':
            self.dataset = ReberDataset(maxLength=params['max_length'])
        elif params['dataset'] == 'high-order':
            self.dataset = HighOrderDataset(
                numPredictions=params['num_predictions'],
                seed=params['seed'],
                smallAlphabet=params['use_small_alphabet'])
            print "Sequence dataset: "
            print " Symbol Number {}".format(self.dataset.numSymbols)
            for seq in self.dataset.sequences:
                print seq

        elif params['dataset'] == 'high-order-long':
            self.dataset = LongHighOrderDataset(params['sequence_length'],
                                                seed=params['seed'])
            print "Sequence dataset: "
            print " Symbol Number {}".format(self.dataset.numSymbols)
            for seq in self.dataset.sequences:
                print seq
        else:
            raise Exception("Dataset not found")

        self.randomStart = self.dataset.numSymbols + 1
        self.randomEnd = self.randomStart + 5000

        MODEL_PARAMS['modelParams']['sensorParams']['encoders']['element']\
          ['categoryList'] = range(self.randomEnd)

        # if not os.path.exists(resultsDir):
        #   os.makedirs(resultsDir)
        # self.resultsFile = open(os.path.join(resultsDir, "0.log"), 'w')
        if params['verbosity'] > 0:
            print " initializing HTM model..."
            # print MODEL_PARAMS
        self.model = ModelFactory.create(MODEL_PARAMS)
        self.model.enableInference({"predictedField": "element"})
        # self.classifier = SDRClassifier(steps=[1], alpha=0.001)
        print "finish initializing HTM model "

        if params['kill_cell_percent'] > 0:
            # a hack to use faulty temporal memory instead
            self.model._getTPRegion().getSelf()._tfdr = MonitoredFaultyTPShim(
                numberOfCols=2048,
                cellsPerColumn=32,
                newSynapseCount=32,
                maxSynapsesPerSegment=128,
                maxSegmentsPerCell=128,
                initialPerm=0.21,
                connectedPerm=0.50,
                permanenceInc=0.10,
                permanenceDec=0.10,
                predictedSegmentDecrement=0.01,
                minThreshold=15,
                activationThreshold=15,
                seed=1960,
            )

        self.mapping = getEncoderMapping(self.model, self.dataset.numSymbols)

        self.numPredictedActiveCells = []
        self.numPredictedInactiveCells = []
        self.numUnpredictedActiveColumns = []

        self.currentSequence = []
        self.targetPrediction = []
        self.replenish_sequence(params, iteration=0)

        self.resets = []
        self.randoms = []
        self.verbosity = 1
        self.sequenceCounter = 0
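All of these reset() methods are driven by a flat params dictionary supplied by the
experiment framework for each repetition. As a minimal, hypothetical sketch (the key
names come from the snippets above, the values are purely illustrative, and suite
stands in for whichever experiment object defines reset()), the LSTM variant in
Example #3 could be configured as follows; note that replenishSequence() may read
additional keys that are not visible in these snippets:

params = {
    'seed': 42,               # seeds random.seed()
    'encoding': 'basic',      # 'basic' or 'distributed'
    'encoding_num': 25,       # width of the element encoding
    'dataset': 'high-order',  # 'simple', 'reber', or 'high-order'
    'num_predictions': 1,     # passed to HighOrderDataset
    'num_cells': 20,          # hidden LSTM layer size for buildNetwork()
    'output_bias': True,      # forwarded as outputbias
    'verbosity': 0,           # > 0 turns on BackpropTrainer verbosity
}

suite.reset(params, repetition=0)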