Example #1
	def checkGradShape(self, shape):
		if len(shape) != 5:
			raise ModuleError("Grad must be 5d tensor")

		_, outmaps, outd, outh, outw = shape
		_, _, fd, fh, fw = self.W.shape

		dpad, hpad, wpad = self.pad
		ddilation, hdilation, wdilation = self.dilation

		if outmaps != self.W.shape[1] * self.groups:
			raise ModuleError("Grad has %d maps (expected: %d)" % (outmaps, self.W.shape[1] * self.groups))

		if outd + 2 * dpad < ddilation * (fd - 1) + 1:
			raise ModuleError(
				"Grad maps depth is too small (got %d, expected at least %d)" %
				(outd + 2 * dpad, ddilation * (fd - 1) + 1)
			)

		if outh + 2 * hpad < hdilation * (fh - 1) + 1:
			raise ModuleError(
				"Grad maps height is too small (got %d, expected at least %d)" %
				(outh + 2 * hpad, hdilation * (fh - 1) + 1)
			)

		if outw + 2 * wpad < wdilation * (fw - 1) + 1:
			raise ModuleError(
				"Grad maps width is too small (got %d, expected at least %d)" %
				(outw + 2 * wpad, wdilation * (fw - 1) + 1)
			)
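The minimum-size checks above encode the standard effective extent of a dilated kernel: a filter of size f with dilation d covers d * (f - 1) + 1 positions, so every padded grad dimension must be at least that large. A minimal sketch of the arithmetic (the helper name is hypothetical, not part of the library):

def minPaddedExtent(fsize, dilation):
    # Effective extent of a dilated kernel: dilation * (fsize - 1) + 1 positions
    return dilation * (fsize - 1) + 1

# A 3-tap filter with dilation 2 spans 5 positions, so e.g. outd + 2 * dpad
# must be at least 5 for the depth check above to pass.
assert minPaddedExtent(3, 2) == 5
assert minPaddedExtent(3, 1) == 3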
Example #2
	def checkDataShape(self, shape):
		if len(shape) != 5:
			raise ModuleError("Data must be 5d tensor")

		_, inmaps, _, _, _ = shape
		if inmaps != self.W.shape[0]:
			raise ModuleError("Data has %d maps (expected: %d)" % (inmaps, self.W.shape[0]))
Example #3
    def checkDataShape(self, shape):
        if len(shape) != 5:
            raise ModuleError("Data must be 5d tensor")

        _, inmaps, ind, inh, inw = shape
        _, _, fd, fh, fw = self.W.shape

        dpad, hpad, wpad = self.pad
        ddilation, hdilation, wdilation = self.dilation

        if inmaps != self.W.shape[1] * self.groups:
            raise ModuleError("Data has %d maps (expected: %d)" %
                              (inmaps, self.W.shape[1] * self.groups))

        if ind + 2 * dpad < ddilation * (fd - 1) + 1:
            raise ModuleError(
                "Data maps depth is too small (got %d, expected >= %d)" %
                (ind + 2 * dpad, ddilation * (fd - 1) + 1))

        if inh + 2 * hpad < hdilation * (fh - 1) + 1:
            raise ModuleError(
                "Data maps height is too small (got %d, expected >= %d)" %
                (inh + 2 * hpad, hdilation * (fh - 1) + 1))

        if inw + 2 * wpad < wdilation * (fw - 1) + 1:
            raise ModuleError(
                "Data maps width is too small (got %d, expected >= %d)" %
                (inw + 2 * wpad, wdilation * (fw - 1) + 1))
Example #4
    def checkGradShape(self, shape):
        if len(shape) != 4:
            raise ModuleError("Grad must be 4d tensor")

        _, _, h, w = shape
        if h % self.scale != 0 or w % self.scale != 0:
            raise ModuleError("Grad map size is not divisible by scale %s" %
                              self.scale)
Example #5
    def checkGradShape(self, shape):
        if len(shape) != 5:
            raise ModuleError("Grad must be 5d tensor")

        _, outmaps, _, _, _ = shape
        if outmaps != self.W.shape[0]:
            raise ModuleError("Grad has %d maps (expected: %d)" %
                              (outmaps, self.W.shape[0]))
Example #6
	def checkDataShape(self, shape):
		if len(shape) != 3:
			raise ModuleError("Data must be 3d tensor")

		_, _, insize = shape
		if insize + 2 * self.pad[1] < self.size[1]:
			raise ModuleError("Data maps size is too small (got %d, expected at least %d)" %
							  (insize + 2 * self.pad[1], self.size[1]))
Example #7
    def checkDataShape(self, shape):
        if len(shape) != 4:
            raise ModuleError("Data must be 4d tensor")

        _, maps, _, _ = shape
        if maps != self.maps:
            raise ModuleError("Data has %d maps (expected: %d)" %
                              (maps, self.maps))
Example #8
    def checkDataShape(self, shape):
        if len(shape) != 2:
            raise ModuleError("Data must be 2d matrix")

        if shape[1] != self.sentlength:
            raise ModuleError(
                "Expected %d data sentence length, %d was given" %
                (self.sentlength, shape[1]))
Example #9
    def checkGradShape(self, shape):
        if len(shape) != 5:
            raise ModuleError("Grad must be 5d tensor")

        _, maps, _, _, _ = shape
        if maps != self.maps:
            raise ModuleError("Grad has %d maps (expected: %d)" %
                              (maps, self.maps))
Example #10
    def checkGradShape(self, shape):
        if len(shape) != 5:
            raise ModuleError("Grad must be 5d tensor")

        _, _, d, h, w = shape
        if d % self.scale != 0 or h % self.scale != 0 or w % self.scale != 0:
            raise ModuleError(
                "Grad map size is not divisible by scale %s (got %s, %s, %s)" %
                (self.scale, d, h, w))
Example #11
    def calcMode(self, T):
        if Config.backend == Config.Backend.cuda:
            if T not in {np.float16, np.float32}:
                raise ModuleError("Unsupported dtype %s" % T)

        elif T != np.float32:
            raise ModuleError("Unsupported dtype %s" % T)

        self.calctype = T
Example #12
    def checkDataShape(self, shapes):
        if not isinstance(shapes, list):
            raise ModuleError("Data must be list of tensors")

        for shape in shapes:
            if len(shape) != 4:
                raise ModuleError("Data must consist of 4d tensors")

            if shape[0] != shapes[0][0]:
                raise ModuleError("Inconsistency in batch size")
Example #13
    def checkGradShape(self, shape):
        if self.axis >= len(shape):
            raise ModuleError(
                "Grad dimension needs to be at least %d, (grad has %d)" %
                (self.axis + 1, len(shape)))

        if shape[self.axis] != self.topk:
            raise ModuleError(
                "Grad topk axis is wrong (got %d, expected exactly %d)" %
                (shape[self.axis], self.topk))
Example #14
    def checkDataShape(self, shape):
        if self.axis >= len(shape):
            raise ModuleError(
                "Data dimension needs to be at least %d, (data has %d)" %
                (self.axis + 1, len(shape)))

        if shape[self.axis] < self.topk:
            raise ModuleError(
                "Data topk axis is too small (got %d, expected at least %d)" %
                (shape[self.axis], self.topk))
Example #15
    def checkGradShape(self, shape):
        if len(shape) != 3:
            raise ModuleError("Grad must be 3d tensor")

        lpad, rpad = self.pad
        size = shape[2]

        if size < lpad + rpad + 1:
            raise ModuleError(
                "Grad maps size is too small (got %d, expected >= %d)" %
                (size, lpad + rpad + 1))
Example #16
    def verifyData(self, data):
        mn, mx = gpuarray.minimum(data).get(), gpuarray.maximum(data).get()
        if mn < -1:
            raise ModuleError(
                "Embedder data verification failed, found index %s (< -1)" %
                mn)

        if mx >= self.W.shape[0]:
            raise ModuleError(
                "Embedder data verification failed, found index %s (vocabulary size is %s)"
                % (mx, self.W.shape[0]))
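The bounds here follow the usual embedding convention: -1 is tolerated as a padding marker, and every other index must lie in [0, W.shape[0]). A rough NumPy equivalent of the same range check (the function name and exception type are illustrative, not the library's API):

import numpy as np

def verifyIndicesSketch(indices, vocabsize):
    # -1 acts as padding; all other indices must fall in [0, vocabsize)
    mn, mx = int(indices.min()), int(indices.max())
    if mn < -1:
        raise ValueError("found index %d (< -1)" % mn)
    if mx >= vocabsize:
        raise ValueError("found index %d (vocabulary size is %d)" % (mx, vocabsize))

verifyIndicesSketch(np.array([[-1, 0, 3], [2, 1, -1]]), vocabsize=4)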
Example #17
    def checkDataShape(self, shape):
        if len(shape) != 3:
            raise ModuleError("Data must be 3d tensor")

        if self.hintBatchSize is not None and shape[1] != self.hintBatchSize:
            raise ModuleError("Data batch size must be = %s (was given %s)" %
                              (self.hintBatchSize, shape[1]))

        if shape[2] != self.insize:
            raise ModuleError("Data must have data size = %s (was given %s)" %
                              (self.insize, shape[2]))
Example #18
    def checkGradShape(self, shape):
        if self.axis >= len(shape):
            raise ModuleError(
                "Not enough dims in grad (%d were given, need at least %d)" %
                (len(shape), self.axis + 1))

        if self.useWeights:
            if shape[:self.axis] != self.v.shape[:self.axis]:
                raise ModuleError(
                    "Inconsistency in grad and weights shapes (%s  with %s)" %
                    (shape, self.v.shape))
Example #19
    def checkVarOnLoad(self, paramName, dataset):
        if paramName == "W":
            if dataset.shape[1] != self.embsize:
                raise ModuleError("Expected embedding size %s, was given %s" %
                                  (self.embsize, dataset.shape[1]))

            self.setVar("W", Variable(gpuarray.to_gpu(dataset)))

        else:
            raise ModuleError("Unknown parameter name '%s' for embedder" %
                              paramName)
Example #20
    def checkDataShape(self, shape):
        if not self.transpose:
            if shape[-1] != self.W.shape[0]:
                raise ModuleError(
                    "Expected %d data dimensions, %d were given" %
                    (self.W.shape[0], shape[-1]))
        else:
            if shape[-1] != self.W.shape[1]:
                raise ModuleError(
                    "Expected %d data dimensions, %d were given" %
                    (self.W.shape[1], shape[-1]))
Example #21
    def checkGradShape(self, shape):
        if len(shape) != 3:
            raise ModuleError("Grad must be 3d tensor")

        batchsize, sentlen, embsize = shape

        if embsize != self.embsize:
            raise ModuleError("Expected %d grad embedding size, %d was given" %
                              (self.embsize, embsize))

        if batchsize != self.inData.shape[0]:
            raise ModuleError("Expected %d grad batch size, %d was given" %
                              (self.inData.shape[0], batchsize))
Example #22
    def checkDataShape(self, shape):
        if len(shape) != 3:
            raise ModuleError("Data must be 3d tensor")

        lpad, rpad = self.pad
        size = shape[2]

        pad = max(lpad, rpad)

        if size < pad + 1:
            raise ModuleError(
                "Data maps size is too small (got %d, expected >= %d)" %
                (size, pad + 1))
Example #23
    def checkDataShape(self, shapes):
        dshape, tshape = shapes

        if len(tshape) != 3 or tshape[1:] != (2, 3):
            raise ModuleError("Bad transform shape (%s was given)" % tshape)

        if len(dshape) != 4:
            raise ModuleError("Data must be 4d tensor")

        if tshape[0] != dshape[0]:
            raise ModuleError(
                "Inconsistency in transform and data batch size (%d in transform vs %d in data)"
                % (tshape[0], dshape[0]))
Example #24
    def checkGradShape(self, shape):
        if len(shape) != 4:
            raise ModuleError("Grad must be 4d tensor")

        if self.shape is not None:
            if self.shape != shape[1:]:
                raise ModuleError(
                    "Bad grad shape (was given %s, expected %s)" %
                    (shape[1:], self.shape))
        else:
            if self.inData[0].shape != shape:
                raise ModuleError(
                    "Bad grad shape (was given %s, expected %s)" %
                    (shape, self.inData[0].shape))
Example #25
    def checkGradShape(self, shape):
        if len(shape) != 2:
            raise ModuleError("Grad must be 2d matrix")

        if not self.transpose:
            if shape[1] != self.W.shape[1]:
                raise ModuleError(
                    "Expected %d grad dimensions, %d were given" %
                    (self.W.shape[1], shape[1]))
        else:
            if shape[1] != self.W.shape[0]:
                raise ModuleError(
                    "Expected %d grad dimensions, %d were given" %
                    (self.W.shape[0], shape[1]))
Example #26
    def checkDataShape(self, shape):
        if len(shape) != 4:
            raise ModuleError("Data must be 4d tensor")

        _, _, inh, inw = shape
        if inh + 2 * self.pad[0] < self.size[0]:
            raise ModuleError(
                "Data maps height is too small (got %d, expected at least %d)"
                % (inh + 2 * self.pad[0], self.size[0]))

        if inw + 2 * self.pad[1] < self.size[1]:
            raise ModuleError(
                "Data maps width is too small (got %d, expected at least %d)" %
                (inw + 2 * self.pad[1], self.size[1]))
Example #27
    def checkDataShape(self, shape):
        if len(shape) <= self.axis:
            raise ModuleError(
                "Not enough dims in data (%d were given, need at least %d)" %
                (len(shape), self.axis + 1))

        concatDim = sum(self.sections)

        if concatDim != shape[self.axis]:
            raise ModuleError(
                "Data shape %s is inconsistent with given sections %s "
                "(expected size %d on axis %d, %d was given)" %
                (shape, self.sections, concatDim, self.axis, shape[self.axis]))
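The section check presumably mirrors numpy.split-style semantics: the sizes listed in self.sections have to sum exactly to the length of the split axis. A small standalone illustration with NumPy (shapes chosen arbitrarily):

import numpy as np

sections, axis = (2, 3, 5), 1
data = np.zeros((4, sum(sections), 8))  # axis 1 has size 2 + 3 + 5 = 10

# np.split expects cut offsets, so convert section sizes to cumulative positions
chunks = np.split(data, np.cumsum(sections)[:-1], axis=axis)
assert [c.shape[axis] for c in chunks] == list(sections)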
Example #28
    def checkGradShape(self, shape):
        if len(shape) != 4:
            raise ModuleError("Grad must be 4d tensor")

        depth, h, w = 0, 0, 0
        for data in self.inData:
            sh = data.shape

            depth += sh[1]
            h, w = max(h, sh[2]), max(w, sh[3])

        gradshape = (self.inData[0].shape[0], depth, h, w)
        if shape != gradshape:
            raise ModuleError("Bad grad shape (%s given, %s expected)" %
                              (shape, gradshape))
Example #29
    def checkAttrOnLoad(self, attrName, dataset):
        if attrName == "vocab":
            self.setAttr("vocab", dataset)

        else:
            raise ModuleError("Unknown attribute name '%s' for embedder" %
                              attrName)
Example #30
    def checkGradShape(self, shape):
        ln = max(self.src, self.dst)

        if len(shape) - 1 < ln:
            raise ModuleError(
                "Grad dimension needs to be at least %d, (grad has %d)" %
                (ln + 1, len(shape)))