Example #1
    def compress(self, n=0, l=0):
        '''
        change from scaling function basis to multi-wavelet basis (s -> d)
        tree is filled out with s[0][0] and d
        n is level in tree
        l is box index
        '''
        if self.compressed: return

        # sub-trees can be done in parallel
        if not self.s[n + 1].has_key(2 * l):
            self.compress(n + 1, 2 * l)
        if not self.s[n + 1].has_key(2 * l + 1):
            self.compress(n + 1, 2 * l + 1)

        k = self.k
        s = Vector(2 * k)
        s[:k], s[k:] = self.s[n + 1][2 * l], self.s[n + 1][2 * l + 1]

        # apply the two scale relationship to get difference coeff
        # in 1d this is O(k^2) flops (in 3d this is O(k^4) flops)
        d = s * self.hgT

        self.s[n][l] = Vector(d[:k])
        self.d[n][l] = Vector(d[k:])
        del self.s[n + 1][2 * l], self.s[n + 1][2 * l + 1]

        if n == 0: self.compressed = 1
Example #2
    def reconstruct(self, n=0, l=0):
        '''
        change from multi-wavelet basis to scaling function basis (d -> s)
        tree just has s at leaves
        n is level in tree
        l is box index
        '''
        if not self.compressed: return

        if self.d[n].has_key(l):
            k = self.k
            d = Vector(2 * k)
            d[:k], d[k:] = self.s[n][l], self.d[n][l]
            del self.d[n][l], self.s[n][l]

            # apply the two scale relationship to recur the scaling coeff down
            # in 1d this is O(k^2) flops (in 3d this is O(k^4) flops)
            s = d * self.hg

            self.s[n + 1][2 * l] = Vector(s[:k])
            self.s[n + 1][2 * l + 1] = Vector(s[k:])

            # sub-trees can be done in parallel
            self.reconstruct(n + 1, 2 * l)
            self.reconstruct(n + 1, 2 * l + 1)

        if n == 0: self.compressed = 0
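
A minimal round-trip sketch of the two methods above, assuming a hypothetical Function instance f whose scaling coefficients already sit at the leaves (how it was constructed is not shown in these examples):

    # f is an illustrative Function instance; construction details are assumed
    f.compress()        # leaves -> s[0][0] plus difference coefficients d[n][l]
    assert f.compressed
    f.reconstruct()     # back to scaling coefficients at the leaves
    assert not f.compressed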
Example #3
 def recur_down(self, n, l, s):
     '''
     In s are scaling coefficients for box n,l ... apply twoscale to generate
     the corresponding coefficients on level n+1 and insert the results into
     the tree of scaling function coefficients.
     '''
     k = self.k
     d = Vector(2 * k)
     d[:k] = s
     s = d * self.hg
     self.s[n + 1][2 * l] = Vector(s[:k])
     self.s[n + 1][2 * l + 1] = Vector(s[k:])
Example #4
 def evaluate_at_box(self, k, coeff, n, l, nn, ll, x):
     if isinstance(x, list) or isinstance(x, Vector):
         value_list = []
         for i in range(len(x)):
             coordinate = (x[i] + ll) * (2.0**(n - nn)) - l
             p = Vector(phi(coordinate, k))
             value_list.append(coeff.inner(p) * math.sqrt(2.0**n))
         return Vector(value_list)
     else:
         coordinate = (x + ll) * (2.0**(n - nn)) - l
         p = Vector(phi(coordinate, self.k))
         value = coeff.inner(p) * math.sqrt(2.0**n)
         return value
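
A worked check of the coordinate mapping used above, with illustrative numbers: a point at local coordinate x = 0.5 in box (nn, ll) = (1, 1) sits at global position (x + ll) / 2**nn = 0.75, and in box (n, l) = (2, 2) that global position has local coordinate 0.75 * 2**n - l = 1.0, which matches the one-line expression:

    # (x + ll) * 2**(n - nn) - l with x=0.5, nn=1, ll=1, n=2, l=2
    assert (0.5 + 1) * (2.0 ** (2 - 1)) - 2 == 1.0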
Example #5
 def init_quadrature(self, order):
     x, w = gauss_legendre(order)
     self.quad_w = Vector(w)
     self.quad_x = Vector(x)
     self.quad_npt = npt = len(w)
     self.quad_phi = Matrix(npt, self.k)  # phi[point,i]
     self.quad_phiT = Matrix(self.k, npt)  # phi[point,i] transpose
     self.quad_phiw = Matrix(npt, self.k)  # phi[point,i]*weight[point]
     for i in xrange(npt):
         p = phi(self.quad_x[i], self.k)
         for m in xrange(self.k):
             self.quad_phi[i, m] = p[m]
             self.quad_phiT[m, i] = p[m]
             self.quad_phiw[i, m] = w[i] * p[m]
Example #6
 def copy(self):
     '''
     Return a deep copy of self
     '''
     result = Function(self.k, self.thresh)
     result.compressed = self.compressed
     result.f = self.f
     for n in self.s.keys():
         for l in self.s[n].keys():
             result.s[n][l] = Vector(self.s[n][l])
     for n in self.d.keys():
         for l in self.d[n].keys():
             result.d[n][l] = Vector(self.d[n][l])
     return result
Example #7
    def get_coeffs(self, n, l):
        '''
        If the scaling coefficients in box n,l exist, return them.
        (allow here for zero boundary conditions for boxes just off
        the ends of the domain)

        Else recur up to the next level looking for a parent.  If a
        parent exists, use two scale to recur those coefficients down
        to make n,l.  Note that this modifies the tree in place and
        you should eventually call sclean to tidy up when finished.
        
        Else, return None (corresponding child boxes exist at a finer scale)
        '''
        if l < 0 or l >= 2**n: return Vector(self.k)
        if self.s[n].has_key(l): return self.s[n][l]

        if n > 0:
            s = self.get_coeffs(n - 1, l / 2)
            if not s: return None
        else:
            return None  # no parent was found

        self.recur_down(n - 1, l / 2, s)

        return self.s[n][l]
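
An illustrative stencil read using get_coeffs on a hypothetical Function instance f: boxes just outside [0, 2**n) come back as zero vectors, a missing box with an existing parent is filled in on the fly via recur_down, and a box whose data only exists at a finer scale returns None (sclean, mentioned above but not shown here, would later tidy up the coefficients created on the fly):

    # f, n and l are illustrative; sm/sp may be zero vectors at the domain edges
    sm = f.get_coeffs(n, l - 1)
    s0 = f.get_coeffs(n, l)
    sp = f.get_coeffs(n, l + 1)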
Example #8
    def refine(self, n, l):
        '''
        refine numerical representation of f(x) to desired tolerance
        n is level in tree
        l is box index
        '''
        # project f(x) at next level
        s0, s1 = self.project(n + 1, 2 * l), self.project(n + 1, 2 * l + 1)
        k = self.k
        s = Vector(2 * k)
        s[:k], s[k:] = s0, s1

        # apply the two scale relationship to get difference coeff
        # in 1d this is O(k^2) flops (in 3d this is O(k^4) flops)
        d = s * self.hgT

        # check to see if within tolerance
        # normf() is Frobenius norm == 2-norm for vectors
        if d[k:].normf() < self.thresh or n >= (self.max_level - 1):
            # put into tree at level n+1
            self.s[n + 1][2 * l], self.s[n + 1][2 * l + 1] = s0, s1
        else:
            # these recursive calls on sub-trees can go in parallel
            self.refine(n + 1, 2 * l)
            self.refine(n + 1, 2 * l + 1)
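
A hypothetical driver for refine, assuming a Function instance f whose f.f attribute already holds the analytic function to approximate (the real constructor may wrap this differently). Each box at the starting level is refined independently, and the resulting scaling coefficients land at the next level or below:

    # illustrative only: refine every box at an assumed starting level
    initial_level = 2
    for l in range(2 ** initial_level):
        f.refine(initial_level, l)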
Example #9
def train(vocab_size: int = 1000, num_epochs: int = 20, batch_size: int = 32, learning_rate: float = 0.05):
    data, targets = prepare(vocab_size, False)
    logistic = Classifier(vocab_size)

    for epoch in range(num_epochs):
        for datum, target in data_iteration(data, targets, batch_size):
            datum = Matrix(datum)
            target = Vector(target)

            output = logistic.forward(datum)

            grad = logistic.backward(datum, output, target)
            logistic.W += grad * learning_rate

        outputs = logistic.forward(Matrix(data))
        acc = accuracy(outputs, Vector(targets))
        l = loss(outputs, Vector(targets))
        print(f'acc => {acc:.2f}, loss => {l:.2f}')
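
A minimal invocation of the training loop above, using the default hyper-parameters from the signature:

    if __name__ == '__main__':
        train(vocab_size=1000, num_epochs=20, batch_size=32, learning_rate=0.05)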
Example #10
    def get_scaling_coeff(self, scaling_coeff, wavelet_coeff, n, l):
        k = self.k
        d = Vector(2 * k)
        d[:k], d[k:] = scaling_coeff, wavelet_coeff

        is_odd = l % 2  # parity of l selects the even or odd child filter

        # apply the two scale relationship to get the scaling coeff of the
        # even (hg0) or odd (hg1) child
        # in 1d this is O(k^2) flops (in 3d this is O(k^4) flops)
        if not is_odd:
            s = d * self.hg0
        else:
            s = d * self.hg1
        return s
Example #11
    def box_quad(self, other, n, l):
        '''
        Take the inner product in the box [n][l] with an external analytic
        expression (i.e. not a MADNESS function)
        '''
        if isinstance(other, Function):
            err_str = "box_quad() takes an externally provided analytic "
            err_str += "expression. If you want to take the inner product "
            err_str += "with another MADNESS function, use inner()."
            raise Exception(err_str)

        if self.compressed: self.reconstruct()

        x = Vector(self.quad_npt)
        g = Vector(self.quad_npt)

        h = 0.5**n
        scale = math.sqrt(h)
        for mu in xrange(self.quad_npt):
            x[mu] = (self.quad_x[mu] + l) * h
            g[mu] = other(x[mu])

        return (self.quad_phiw * self.s[n][l]).inner(g) * scale
Example #12
    def __evaluate(self, n, l, x):
        '''
        eval f(x) using adaptively refined numerical representation of f(x)
        answer should be within tolerance of the analytical f(x)

        Descend tree looking for box (n,l) with scaling function
        coefficients containing the point x.
        '''
        if self.s[n].has_key(l):
            p = Vector(phi(x, self.k))
            return self.s[n][l].inner(p) * math.sqrt(2.0**n)
        else:
            n, l, x = n + 1, 2 * l, 2 * x
            if x >= 1: l, x = l + 1, x - 1
            return self.__evaluate(n, l, x)
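
A hypothetical public entry point for the recursion above, assuming callers start at the root box with x in [0, 1); the real class may expose this through __call__ instead:

    def evaluate(self, x):
        # sketch only: reconstruct first, since __evaluate walks the s-coefficient tree
        if self.compressed: self.reconstruct()
        return self.__evaluate(0, 0, x)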
Example #13
 def project(self, n, l):
     '''
     s[n][l] = integral(phi[n][l](x) * f(x))
     for box (n,l), project f(x) into the scaling function basis
     using the quadrature rule
     '''
     s = Vector(self.k)
     h = 0.5**n
     scale = math.sqrt(h)
     for mu in xrange(self.quad_npt):
         x = (l + self.quad_x[mu]) * h
         f = self.f(x)
         for i in xrange(self.k):
             s[i] += scale * f * self.quad_phiw[mu, i]
     return s
Example #14
    def gaxpy_iter(self, alpha, other, beta, n=0, l=0):
        '''
        recursive "iteration" for gaxpy
        '''
        if self.d[n].has_key(l) or other.d[n].has_key(l):
            if self.d[n].has_key(l) and other.d[n].has_key(l):
                self.d[n][l].gaxpy(alpha, other.d[n][l], beta)

            elif not self.d[n].has_key(l) and other.d[n].has_key(l):
                self.d[n][l] = Vector(other.d[n][l]).scale(beta)

            elif self.d[n].has_key(l) and not other.d[n].has_key(l):
                self.d[n][l].scale(alpha)

            # calls on sub-trees can go in parallel
            self.gaxpy_iter(alpha, other, beta, n + 1, 2 * l)
            self.gaxpy_iter(alpha, other, beta, n + 1, 2 * l + 1)
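
A hypothetical gaxpy wrapper built on the recursion above, assuming both functions are compressed so their only scaling coefficients live at the root box; the method and argument names simply mirror those already used here:

    def gaxpy(self, alpha, other, beta):
        # sketch only: in place, self = alpha*self + beta*other in the wavelet basis
        if not self.compressed: self.compress()
        if not other.compressed: other.compress()
        self.s[0][0].gaxpy(alpha, other.s[0][0], beta)
        self.gaxpy_iter(alpha, other, beta)
        return self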
Example #15
    def refine_limited(self, other, n, l):
        '''
        refine numerical representation of f(x) to desired tolerance
        but don't refine any more than the finest level of other
        n is level in tree
        l is box index
        Note:   This version does not have the desired effect.
                Use with caution and skepticism.
        '''
        if other.compressed: other.reconstruct()

        # project f(x) at next level
        s0, s1 = self.project(n + 1, 2 * l), self.project(n + 1, 2 * l + 1)
        k = self.k
        s = Vector(2 * k)
        s[:k], s[k:] = s0, s1

        # apply the two scale relationship to get difference coeff
        # in 1d this is O(k^2) flops (in 3d this is O(k^4) flops)
        d = s * self.hgT

        # check to see if within tolerance
        # normf() is Frobenius norm == 2-norm for vectors
        if ((d[k:].normf() < self.thresh)
                or (other.s[n + 1].has_key(2 * l)
                    and other.s[n + 1].has_key(2 * l + 1))):
            # put into tree at level n+1
            self.s[n + 1][2 * l], self.s[n + 1][2 * l + 1] = s0, s1
        elif other.s[n + 1].has_key(2 * l):
            self.s[n + 1][2 * l] = s0
            self.refine_limited(other, n + 1, 2 * l + 1)
        elif other.s[n + 1].has_key(2 * l + 1):
            self.refine_limited(other, n + 1, 2 * l)
            self.s[n + 1][2 * l + 1] = s1
        else:
            # these recursive calls on sub-trees can go in parallel
            self.refine_limited(other, n + 1, 2 * l)
            self.refine_limited(other, n + 1, 2 * l + 1)
Example #16
 def add_coefficients(self, coeff1, coeff2):
     '''
     Return the element-wise sum coeff1 + coeff2 as a new Vector
     '''
     temp = Vector(coeff1)
     temp.gaxpy(1.0, coeff2, 1.0)
     return temp
Example #17
    def __init__(self, input_size: int):
        self.input_size = input_size

        # small random weights, scaled down by sqrt(input_size)
        self.W = Vector([random() / (input_size ** 0.5) for _ in range(input_size)])
        self.b = Vector([0.])