Example #1
def testAnd():
    # train a single-output net on the AND truth table; the third input is held
    # at 1 in every example and the targets are 0.1/0.9 rather than hard 0/1
    examples = [[[0, 0, 1], [0.1]], [[0, 1, 1], [.1]], [[1, 0, 1], [.1]],
                [[1, 1, 1], [.9]]]
    net = Network.Network([3, 1])
    t = BackProp()
    t.TrainOnLine(examples, net)
    return net
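The snippets in this listing use Network, BackProp and Trainers without showing their imports; they appear to come from the rdkit.ML.Neural package (Trainers.py and its unit tests). Assuming that origin, a minimal driver for the AND example above might look like the following sketch (the module paths are an assumption, not stated in the original):

# Sketch only: assumes the rdkit.ML.Neural package provides these modules.
from rdkit.ML.Neural import Network
from rdkit.ML.Neural.Trainers import BackProp

# paste testAnd() from above into the same module, then:
net = testAnd()
for inputs, target in [[[0, 0, 1], [0.1]], [[1, 1, 1], [0.9]]]:
    print('target %.1f -> predicted %.3f' % (target[0], net.ClassifyExample(inputs)))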
Example #2
def testXor():
  # XOR is not linearly separable, so this example adds a hidden layer ([3, 3, 1])
  examples = [[[0, 0, 1], [.1]], [[0, 1, 1], [.9]], [[1, 0, 1], [.9]], [[1, 1, 1], [.1]]]
  net = Network.Network([3, 3, 1])

  t = BackProp(speed=.8)
  t.TrainOnLine(examples, net, errTol=0.2)
  return net
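A quick check of the trained XOR net, following the same print loop as Example #4 below, might look like this sketch (not part of the original listing):

# Sketch: evaluate the trained XOR net on all four input patterns.
net = testXor()
for inputs, target in [[[0, 0, 1], [.1]], [[0, 1, 1], [.9]], [[1, 0, 1], [.9]], [[1, 1, 1], [.1]]]:
    print('target %.1f -> predicted %.3f' % (target[0], net.ClassifyExample(inputs)))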
Example #3
def _trainExamples(self, ex, arch=[3, 1], useAvgErr=False):
    # build a net with the requested architecture, train it on-line, and return
    # the net along with the absolute error on each training example
    net = Network.Network(arch)
    t = Trainers.BackProp()
    t.TrainOnLine(ex,
                  net,
                  errTol=self.trainTol,
                  useAvgErr=useAvgErr,
                  silent=True)
    # ex holds flat examples, so x[-1] is the target value for example x
    errs = [abs(x[-1] - net.ClassifyExample(x)) for x in ex]
    return net, errs
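Examples #3 and #6 (below) are versions of the same unit-test helper: self.trainTol is an error tolerance stored on the test case, and callers compare the returned per-example errors against it. A hedged sketch of how such a test case might be wired up follows; the class name, tolerance value, and flat example data are assumptions, not part of the original:

# Sketch only: the class name, tolerance value, and module paths are assumptions.
import unittest

from rdkit.ML.Neural import Network, Trainers


class NeuralNetTests(unittest.TestCase):
    trainTol = 0.3  # assumed per-example error tolerance

    def _trainExamples(self, ex, arch=[3, 1], useAvgErr=False):
        # same helper as Example #3
        net = Network.Network(arch)
        Trainers.BackProp().TrainOnLine(ex, net, errTol=self.trainTol,
                                        useAvgErr=useAvgErr, silent=True)
        errs = [abs(x[-1] - net.ClassifyExample(x)) for x in ex]
        return net, errs

    def test_and(self):
        # flat examples: three input values, then the target in the last slot
        examples = [[0, 0, 1, 0.1], [0, 1, 1, 0.1], [1, 0, 1, 0.1], [1, 1, 1, 0.9]]
        _, errs = self._trainExamples(examples)
        self.assertLess(max(errs), self.trainTol)


if __name__ == '__main__':
    unittest.main()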
Example #4
def testOr():
  examples = [[[0, 0, 1], [0.1]], [[0, 1, 1], [.9]], [[1, 0, 1], [.9]], [[1, 1, 1], [.9]]]
  net = Network.Network([3, 1])
  t = BackProp()
  t.TrainOnLine(examples, net, maxIts=1000, useAvgErr=0)
  # print target -> prediction for each training pattern
  print('classifications:')
  for example in examples:
    res = net.ClassifyExample(example[0])
    print('%f -> %f' % (example[1][0], res))

  return net
Example #5
def testLinear():
    # fit a 1-2-1 net to the identity mapping y = x on a handful of points;
    # here each example is a flat [input, target] pair
    examples = [
        [.1, .1],
        [.2, .2],
        [.3, .3],
        [.4, .4],
        [.8, .8],
    ]
    net = Network.Network([1, 2, 1])
    t = BackProp(speed=.8)
    t.TrainOnLine(examples, net, errTol=0.1, useAvgErr=0)
    print('classifications:')
    for example in examples:
        res = net.ClassifyExample(example[:-1])
        print('%f -> %f' % (example[-1], res))

    return net
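Note that the example layout here differs from the logic-gate snippets: each example is a flat [input, target] pair, so ClassifyExample receives example[:-1] rather than example[0]. Probing the trained net at a point outside the training list is straightforward (a sketch, assuming testLinear and its imports are in scope):

# Sketch: query the trained 1-2-1 net at an input it was not trained on.
net = testLinear()
print('0.5 -> %f' % net.ClassifyExample([0.5]))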
Example #6
def _trainExamples(self, ex, arch=[3, 1]):
    # older variant of Example #3: same training call, but only the per-example
    # errors are returned
    net = Network.Network(arch)
    t = Trainers.BackProp()
    t.TrainOnLine(ex, net, errTol=self.trainTol, useAvgErr=0, silent=1)
    # list comprehension rather than map/lambda so errs is a list under Python 3
    errs = [abs(x[-1] - net.ClassifyExample(x)) for x in ex]
    return errs
Example #7
def CrossValidationDriver(examples,
                          attrs=[],
                          nPossibleVals=[],
                          holdOutFrac=.3,
                          silent=0,
                          tolerance=0.3,
                          calcTotalError=0,
                          hiddenSizes=None,
                          **kwargs):
    """
    **Arguments**

      - examples: the full set of examples

      - attrs: a list of attributes to consider in the tree building
         *This argument is ignored*

      - nPossibleVals: a list of the number of possible values each variable can adopt
         *This argument is ignored*

      - holdOutFrac: the fraction of the data which should be reserved for the hold-out set
         (used to calculate the error)

      - silent: toggles whether progress information is printed as the run proceeds.

      - tolerance: the tolerance for convergence of the net

      - calcTotalError: if this is true, the entire data set (rather than just the
           hold-out set) is used to calculate the accuracy of the net

      - hiddenSizes: a list containing the size(s) of the hidden layers in the network.
           if _hiddenSizes_ is None, one hidden layer containing the same number of nodes
           as the input layer will be used

    **Returns**

       a 2-tuple containing:

         1) the net

         2) the cross-validation error of the net

    **Note**
      At the moment, this is specific to nets with only one output

  """
    nTot = len(examples)
    # split the example indices into hold-out (test) and training sets, with or
    # without replacement depending on the replacementSelection keyword argument
    if not kwargs.get('replacementSelection', 0):
        testIndices, trainIndices = SplitData.SplitIndices(nTot,
                                                           holdOutFrac,
                                                           silent=1,
                                                           legacy=1,
                                                           replacement=0)
    else:
        testIndices, trainIndices = SplitData.SplitIndices(nTot,
                                                           holdOutFrac,
                                                           silent=1,
                                                           legacy=0,
                                                           replacement=1)
    trainExamples = [examples[x] for x in trainIndices]
    testExamples = [examples[x] for x in testIndices]

    nTrain = len(trainExamples)
    if not silent:
        print('Training with %d examples' % (nTrain))

    # one input node per attribute (the last entry of each example is the target)
    # and a single output node; the default hidden layer matches the input layer
    nInput = len(examples[0]) - 1
    nOutput = 1
    if hiddenSizes is None:
        nHidden = nInput
        netSize = [nInput, nHidden, nOutput]
    else:
        netSize = [nInput] + hiddenSizes + [nOutput]
    net = Network.Network(netSize)
    t = Trainers.BackProp()
    t.TrainOnLine(trainExamples,
                  net,
                  errTol=tolerance,
                  useAvgErr=0,
                  silent=silent)

    nTest = len(testExamples)
    if not silent:
        print('Testing with %d examples' % nTest)
    if not calcTotalError:
        xValError, _ = CrossValidate(net, testExamples, tolerance)
    else:
        xValError, _ = CrossValidate(net, examples, tolerance)
    if not silent:
        print('Validation error was %%%4.2f' % (100 * xValError))
    net._trainIndices = trainIndices
    return net, xValError
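Assuming this driver lives in the same rdkit.ML.Neural package as the other snippets (where CrossValidate, called above, is a sibling function), a minimal invocation might look like the sketch below. The module path, toy data, and keyword values are illustrative assumptions, not taken from the original.

# Sketch only: module path and toy data set are assumptions.
from rdkit.ML.Neural.CrossValidate import CrossValidationDriver

# flat examples: three input values followed by the target in the last slot,
# replicated so a 30% hold-out split still leaves examples to train on
examples = [[0, 0, 1, 0.1], [0, 1, 1, 0.9], [1, 0, 1, 0.9], [1, 1, 1, 0.9]] * 5
net, xValErr = CrossValidationDriver(examples, holdOutFrac=0.3, tolerance=0.3, silent=1)
print('hold-out error: %4.2f%%' % (100 * xValErr))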