Example #1
    def parse(target, outnetwork=Network):
        """
        NNXSerializer.parse(target) -> Network object

        Loads a neural network from its external XML representation into
        a "Network" object (or into the class given by "outnetwork")
        * "target" is either a filename or a file object
        * "outnetwork" defines the output wrapper (output class)
        """
        if not issubclass(outnetwork, Network):
            raise TypeError("The argument 1 must be a class derived from "
                            "Network")
        Validators.bfileobject(target)
        root = ET.parse(target).getroot()
        L = [[
            NeuronView.merge(
                map(float,
                    neuron.get(NNXSerializer.ATTR_WEIGHTS).split(",")),
                float(neuron.get(NNXSerializer.ATTR_BIAS))) for neuron in layer
        ] for layer in root]
        speed = float(root.get(NNXSerializer.ATTR_SPEED))
        alg = root.get(NNXSerializer.ATTR_ALG)
        if hasattr(Algorithms, alg):
            return outnetwork(*L, n=speed, algorithm=getattr(Algorithms, alg))
        return outnetwork(*L, n=speed)
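A minimal usage sketch of the parser above, assuming NNXSerializer and Network are importable from this project; the file name "model.nnx" and the subclass MyNetwork are assumptions:

    net = NNXSerializer.parse("model.nnx")  #default wrapper: Network
    custom = NNXSerializer.parse("model.nnx", outnetwork=MyNetwork)  #MyNetwork must subclass Network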
Example #2
    def __getitem__(self, key):
        """
        H.__getitem__(x) <==> H[x]

        This implementation accepts double indexing, i.e. using a tuple of
        two indices to reach a single neuron.

        Example: H.__getitem__((l, n)) <==> H[(l, n)] <==> H[l, n]
        - the argument 'l' (integer) indicates the layer;
        - the argument 'n' (integer or slice) indicates the neuron(s) at
        the l-th layer.
        H[l] -> map of neurons if the argument 'l' is an integer, or tuple
        of layers if the argument 'l' is a slice.
        H[l, n] -> NeuronView if 'n' is an integer, map of neurons otherwise
        """
        L = self._nn._links
        if isinstance(key, _Sequence) and len(key) == 2:
            layer_index = Validators.arrayindex(key[0], len(self))
            neuron_index = key[1]
            if isinstance(neuron_index, slice):
                neuron_index = slice(
                    *neuron_index.indices(len(L[layer_index])))
                return map(NeuronView, L[layer_index][neuron_index])
            neuron_index = Validators.arrayindex(neuron_index,
                                                 len(L[layer_index]))
            return NeuronView(L[layer_index][neuron_index])
        elif isinstance(key, slice):
            return tuple(
                map(NeuronView, l) for l in L[slice(*key.indices(len(self)))])
        return map(NeuronView, L[Validators.arrayindex(key, len(self))])
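A short usage sketch of the indexing forms described in the docstring above (H is the hidden-layer view; the concrete network behind it is assumed):

    neuron = H[0, 2]        #NeuronView: third neuron of the first layer
    pair = list(H[0, :2])   #map of NeuronView over the first two neurons
    tail = H[1:]            #tuple of neuron maps, one per remaining layer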
Example #3
    def parse(target, outresult=ImageSelection):
        """
        ISSerializer.parse(target) -> ImageSelection
        
        Loads an image selection from its XML-representation.
        * "target" is either a filename or a file object
        * "outresult" is either the ImageSelection class of a subclass
        thereof
        """
        if not issubclass(outresult, ImageSelection):
            raise TypeError("'outresult' argument be either the"
                            "ImageSelection class or a subclass thereof.")
        Validators.bfileobject(target)
        root = ET.parse(target).getroot()
        snipfunc = root.get(ISSerializer.ATTR_SNIPPER)
        bwidth = int(root.get(ISSerializer.ATTR_BLOCK_WIDTH))
        bheight = int(root.get(ISSerializer.ATTR_BLOCK_HEIGHT))
        snipfunc = getattr(imgsnipper, snipfunc)

        S = outresult(bwidth, bheight, snipper=snipfunc)
        for image in root:
            path = image.get(ISSerializer.ATTR_PATH)
            rfunc = image.get(ISSerializer.ATTR_RESULT_FUNC)
            result = getattr(imgresult, rfunc).from_string(image.text)
            S.append((path, result))
        return S
Example #4
 def write(source, target):
     """
     ISSerializer.write(source, target) -> None
     
     Writes your image selection to the specified file
     * "source" is an instance of ImageSelection or of a subclass
     thereof
     * "target" is either the name of a file of a binary file object
     """
     if not isinstance(source, ImageSelection):
         raise TypeError("Invalid source type. ImageSelection "
                         "implementations are only acceptible")
     Validators.bfileobject(target)
     snipfunc, bwidth, bheight = source.snipper
     #root attributes contain snipper parameters
     root = ET.Element(ISSerializer.TAG_ROOT,
                       attrib={
                           ISSerializer.ATTR_SNIPPER: snipfunc.__name__,
                           ISSerializer.ATTR_BLOCK_WIDTH: str(bwidth),
                           ISSerializer.ATTR_BLOCK_HEIGHT: str(bheight)
                       })
     #Sub elements provide image links and result values
     for path, result in zip(source.paths(), source.results()):
         sub = ET.SubElement(root,
                             ISSerializer.TAG_IMG,
                             attrib={
                                 ISSerializer.ATTR_PATH:
                                 path,
                                 ISSerializer.ATTR_RESULT_FUNC:
                                 result.__class__.__name__
                             })
         sub.text = result.to_string()
     ET.ElementTree(root).write(target)
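A hedged round-trip sketch for the two ISSerializer methods, assuming an existing ImageSelection instance named selection; the file name is an assumption:

    with open("selection.xml", "wb") as f:  #binary object, as the bfileobject check suggests
        ISSerializer.write(selection, f)
    restored = ISSerializer.parse("selection.xml")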
Example #5
 def __setitem__(self, key, value):
     """R.__setitem((w, h), value) <==> R[w, h] = value"""
     if not isinstance(value, int):
         return TypeError("Integer values are only acceptable.")
     w, h = key
     w = Validators.arrayindex(w, self.width)
     h = Validators.arrayindex(h, self.height)
     self.__matrix[h][w] = value
    def test_arrayindex(self):
        #apply inappropriate values:

        for x in range(_NUM):
            rnd = randint(2, 1000)  #rnd >= 2 keeps every assertion below deterministic
            self.assertRaises(IndexError, Validators.arrayindex, -rnd, rnd - 1)
            self.assertRaises(IndexError, Validators.arrayindex, -rnd - 1, rnd)
            self.assertRaises(IndexError, Validators.arrayindex, rnd, rnd)
            self.assertRaises(IndexError, Validators.arrayindex, rnd, 2)
            self.assertRaises(TypeError, Validators.arrayindex, str(rnd), rnd)

            rnd2 = randint(1, 1000)  #rnd2 >= 1 so that arrayindex(rnd, rnd + rnd2) is valid
            self.assertEqual(Validators.arrayindex(-rnd, rnd + rnd2), rnd2)
            self.assertEqual(Validators.arrayindex(rnd, rnd + rnd2), rnd)
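The assertions above pin down the behaviour of Validators.arrayindex; a minimal sketch consistent with them (an illustration only, not the project's actual implementation):

    def arrayindex(index, length):
        """Return 'index' normalized against 'length', or raise."""
        if not isinstance(index, int):
            raise TypeError("the index must be an integer")
        if index < -length or index >= length:
            raise IndexError("the index is out of range")
        return index + length if index < 0 else index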
Example #7
 def __getitem__(self, key):
     """O.__getitem__(x) <==> O[x]"""
     if isinstance(key, slice):
         key = slice(*key.indices(len(self)))  #normalize once
         return map(NeuronView, self._nn._links[-1][key])
     return NeuronView(self._nn._links[-1][Validators.arrayindex(
         key, len(self))])
Example #8
 def insert(self, index):
     """
     Inserts a new input neuron before the index
     """
     index = Validators.arrayindex(index, self._nn.ninps + 1)
     #traverse the next layer and insert corresponding weights
     for s in self._nn._links[0]:
         s.insert(index, Primitives.initweight())
     self._nn._ninps += 1  #correct constant
     self._refresh()
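A short usage sketch (I is the input-layer view this method belongs to; the surrounding network is assumed):

    I.insert(0)        #new input at the front; every first-layer neuron gains a weight
    I.insert(len(I))   #append behind the last input, allowed because of ninps + 1 above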
Example #9
 def __getitem(collection, key, length):
     #tuple keys are resolved recursively, one level per call
     if isinstance(key, _Sequence):
         sub = collection[int(key[0])]
         return Network.__getitem(sub, key[1:] if len(key) > 2 else key[1],
                                  len(sub))
     if isinstance(key, slice):
         index = slice(*key.indices(length))
     else:
         index = Validators.arrayindex(key, length)
     return collection[index]
Example #10
 def __delitem__(self, key):
     """I.__delitem__(k) <==> del I[k] -- delete one input"""
     key = Validators.arrayindex(key, len(self))
     if len(self) < 2:
         raise ValueError("The input layer cannot be removed")
     #traverse the next layer and remove corresponding links:
     for x in self._nn._links[0]:
         del x[key]
     self._nn._ninps -= 1  #correct constant
     self._refresh()
 def __getitem__(self, key):
     self_length = len(self)
     if isinstance(key, _Sequence):
         raise NotImplementedError("Sequence index assigning is not "
                                   "implemented")
     elif isinstance(key, slice):
         return [
             self.get_item(*self.__getdindex(k))
             for k in range(*key.indices(self_length))
         ]
     elif isinstance(key, int):
         return self.get_item(
             *self.__getdindex(Validators.arrayindex(key, self_length)))
     raise TypeError("Wrong type of the specified index.")
Example #12
    def write(source, target):
        """
        NNXSerializer.write(source, target) -> None

        Writes your neural network to the specified file
        * "source" is an instance of "Network" or of a subclass thereof
        * "target" is either a file name or a binary file object
        """
        if not isinstance(source, Network):
            raise TypeError(
                "The neural network to serialize must be "
                "an instance of Network, or of a subclass thereof, "
                "Not %s" % type(source))
        Validators.bfileobject(target)
        #the root element carries the overall parameters; its children are the layers
        root = ET.Element(NNXSerializer.TAG_ROOT,
                          attrib={
                              NNXSerializer.ATTR_SPEED: str(source.speed),
                              NNXSerializer.ATTR_ALG: source.algorithm.__name__
                          })
        #SubElement function provides a way to create new sub-elements for
        #a given element.
        for layer in source._links:  #traverse all layers
            sub = ET.SubElement(root, NNXSerializer.TAG_LAYER)
            for neuron in map(NeuronView, layer):
                ET.SubElement(sub,
                              NNXSerializer.TAG_NEURON,
                              attrib={
                                  NNXSerializer.ATTR_WEIGHTS:
                                  ", ".join(map(str, neuron.weights)),
                                  NNXSerializer.ATTR_BIAS:
                                  str(neuron.bias)
                              })
        #When encoding is US-ASCII or UTF-8 ET's output is binary!
        #Because the output is binary only BufferedIOBase-like objects
        #are accepted.
        ET.ElementTree(root).write(target)
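A hedged sketch of the round trip this enables, assuming a Network instance named net; the file name is an assumption:

    with open("net.nnx", "wb") as f:  #binary file object, per the note above
        NNXSerializer.write(net, f)
    restored = NNXSerializer.parse("net.nnx")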
Example #13
 def __setitem__(self, key, value):
     """O.__setitem__(x, v) <==> O[x] = v"""
     L = self._nn._links
     nlinks = self._pllen(len(L) - 1)  #number of links per neuron
     if isinstance(key, slice):
         key = key.indices(len(self))
         if not len(value) and len(range(*key)) == len(self):
             raise IndexError("The output layer cannot be removed")
         L[-1][slice(*key)] = (_LinkedLayer.validate_individual(
             val, -1, nlinks) for val in value)
     else:
         L[-1][Validators.arrayindex(
             key, len(self))] = _LinkedLayer.validate_individual(
                 value, -1, nlinks)
     self._refresh()
Example #14
 def insert(self, index, value):
     """
     S.insert(index, value) -- inserts an item before the index
     """
     path, result = value
     if not isinstance(result, imgresult.ImageResultBase):
         raise TypeError("The 'result' must implement "
                         "imgresult.ImgageResultBase")
     index = Validators.arrayindex(index, len(self) + 1)
     if imghdr.what(path) not in ('jpeg', 'png', 'gif', 'bmp', 'tiff',
                                  'pbm', 'pgm', 'ppm'):
         raise TypeError("The specified file is not supported")
     self.__items.insert(index, path)
     self.__results.insert(index, result)
     return index
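A hedged usage sketch; the paths are assumptions and SomeResult stands in for any concrete imgresult.ImageResultBase implementation:

    S.insert(0, ("images/a.png", SomeResult()))       #insert at the front
    S.insert(len(S), ("images/b.png", SomeResult()))  #append at the end (hence len(self) + 1 above)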
Example #15
 def __getitem__(self, key):
     """
     S.__getitem__((index, *parser)) -> tuple
     Return value: results, ImageSnipper [, ImageSnipper, ...]
     
     * 'parser' -- any one-argument function for mapping pixels
     * 'index' -- index of the desired item
     """
     if len(key) < 2:
         raise IndexError("The specified key can not be allowed. "
                          "The key must include one index and at least one "
                          "parser function")
     index = Validators.arrayindex(key[0], len(self))
     surface = image.load(self.__items[index])
     return ((self.__results, ) + tuple(
         self.__snipper(
             surface, self.__bwidth, self.__bheight, pix_parser=parser)
         for parser in key[1:]))
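A short usage sketch of the tuple key described above; 'grayscale' is a hypothetical one-argument pixel parser:

    results, blocks = S[3, grayscale]                    #one parser
    results, raw, gray = S[3, (lambda p: p), grayscale]  #several parsers at once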
Example #16
 def __setitem__(self, key, value):
     """n.__setitem__(x, v) <==> n[x]"""
     if isinstance(key, slice):
         indices = key.indices(len(self))
         #ensure that the count of elements for copying is equal to
         #the range of the specified slice
         if len(value) != len(range(*indices)):
             raise ValueError("The assigned values do not match the "
                              "range of the specified slice, i.e. "
                              "len(indices({})) != len(values({}))".format(
                                  list(range(*indices)), value))
         elif not all(isinstance(v, _Number) for v in value):
             raise TypeError(
                 "The assined values must be numbers, i.e. "
                 "the values must be instances of numbers.Number")
         key = slice(*indices)
     else:
         key = Validators.arrayindex(key, len(self))
         if not isinstance(value, _Number):
             raise TypeError("The weights of the neuron must be numbers"
                             ", not {}".format(type(value)))
     self._neuron[key] = value  #the arguments are trusted at this point
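A short usage sketch of the checks above ('n' is the neuron weight view from the docstring):

    n[0] = 0.5            #single weight: must be a number
    n[1:3] = [0.1, 0.2]   #slice: the length and element types are validated first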
Example #17
    def __setitem__(self, key, value):
        #inappropriate values will be refused
        L = self._nn._links
        if isinstance(key, _Sequence) and len(key) == 2:
            layer_index = Validators.arrayindex(key[0], len(self))
            neuron_index = key[1]
            if isinstance(neuron_index, slice):
                neuron_index = slice(
                    *neuron_index.indices(len(L[layer_index])))
                old_layer_length = len(L[layer_index])  #initial length
                #raises ValueError if the slice step != 1 and
                #len(value) does not match the length of the slice
                L[layer_index][neuron_index] = (
                    _LinkedLayer.validate_individual(v, layer_index,
                                                     self._pllen(layer_index))
                    for v in value)

                new_layer_length = len(L[layer_index])
                if not new_layer_length:
                    #the layer will be deleted if the count of neurons
                    #equals zero
                    del self[layer_index]
                elif old_layer_length != new_layer_length:
                    #adjust the next layer manually if the count of
                    #neurons has been changed

                    #Since the count of neurons can be lower or higher
                    #than the initial length after the assignment, the
                    #negative and positive changes are handled separately:
                    diff = new_layer_length - old_layer_length
                    stop = neuron_index.stop  #last value
                    #In this case either start <= stop or (stop == 0 and
                    #start >= stop).
                    if diff > 0:
                        for n in L[layer_index + 1]:
                            #inserts before n[stop]
                            n[stop:stop] = (Primitives.initweight()
                                            for i in range(diff))
                    else:
                        #adjust the next layer, i.e remove all redundant
                        #links
                        for n in L[layer_index + 1]:
                            #notice that the diff is negative
                            del n[stop + diff:stop]
            else:
                neuron_index = Validators.arrayindex(neuron_index,
                                                     len(L[layer_index]))
                L[layer_index][  #one to one substitution
                    neuron_index] = _LinkedLayer.validate_individual(
                        value, layer_index, self._pllen(layer_index))
                #no adjusting
        elif isinstance(key, slice):
            #raises ValueError if the slice step is zero:
            key = slice(*key.indices(len(self)))
            #extended (stepped) slice or a simple one?
            if abs(key.step) > 1:  #extended slice: the step is neither 1 nor -1
                #raises an error if there is attempt to assign
                #a sequence of the specified size to an extended slice
                #of the unmatched size
                L[key] = ([
                    _LinkedLayer.validate_individual(node, lr_i,
                                                     self._pllen(lr_i))
                    for node in value[v_i]
                ] for v_i, lr_i in enumerate(
                    range(key.start, key.stop, key.step)))
                #adjust the layer following each assigned layer:
                for i in range(key.start + 1, key.stop + 1, key.step):
                    Primitives.adjustlayer(L[i], self._pllen(i))
            else:
                #no errors if sequences' lengths do not match
                #can also remove layer when using notation N[x] = []
                top_bound = key.start + len(value)
                L[key] = ([
                    _LinkedLayer.validate_individual(node, nlyr, nlinks)
                    for node in value[indx]
                ] for indx, nlinks, nlyr in zip(
                    range(len(value)),
                    chain((self._pllen(key.start),
                           ), map(len, value)), range(key.start, top_bound)))
                #adjust the layer in front of the slice:
                Primitives.adjustlayer(L[top_bound], self._pllen(top_bound))
        else:
            key = Validators.arrayindex(key, len(self))
            L[key] = (_LinkedLayer.validate_individual(node, key,
                                                       self._pllen(key))
                      for node in value)
            #adjust the next layer
            Primitives.adjustlayer(L[key + 1], len(L[key]))
        self._refresh()  #reestablish learning algorithm
Example #18
 def __setitem__(self, index, value):
     """S.__setitem__(i, v) <==> S[i] = v"""
     index = Validators.arrayindex(index, len(self))
     index = self.insert(index, value)
     del self[index + 1]