Пример #1
0
    def distributedVector(self, s):
        """Return a deterministic pseudo-random vector for the string *s*.

        The string is hashed into a 32-bit seed for NumPy's global RNG,
        so identical inputs always produce identical vectors.
        """
        # Seed must fit in 32 bits; op.hash is a project-level hash function.
        seed = abs(op.hash(s)) % 4294967295
        np.random.seed(seed)  # NOTE: mutates the global NumPy RNG state
        if self.mode != "binary":
            return op.random_vector(self.dimension, normalized=False)
        # Binary mode: each component is +/- 1/sqrt(d), giving unit norm.
        scale = 1 / np.sqrt(self.dimension)
        return np.random.choice([-scale, scale], self.dimension)
Пример #2
0
    def distributedVector(self, s):
        """Return (and memoize) the deterministic pseudo-random vector for *s*.

        Results are cached in ``self.random_cache`` so each distinct string
        is generated only once.
        """
        try:
            return self.random_cache[s]
        except KeyError:
            pass
        # Seed must fit in 32 bits; op.hash is a project-level hash function.
        seed = abs(op.hash(s)) % 4294967295
        np.random.seed(seed)  # NOTE: mutates the global NumPy RNG state
        vector = op.random_vector(self.dimension, normalized=False)
        self.random_cache[s] = vector
        return vector
Пример #3
0
__author__ = 'lorenzo'

import kerMIT.operation
from kerMIT import operation

import numpy


def covariance(a, b):
    """Return the covariance matrix of the outer product of *a* and *b*.

    NOTE(review): this relies on ``a`` exposing an ``outer`` method — plain
    NumPy arrays do not have one, so ``a`` is presumably a project vector
    type (e.g. from kerMIT.operation); confirm against callers.
    """
    outer_product = a.outer(b)
    return numpy.cov(outer_product)


if __name__ == '__main__':
    # BUGFIX: `import kerMIT.operation` binds only the package name `kerMIT`
    # at module level, so the bare name `operation` below was a NameError.
    # Alias the already-imported submodule locally.
    operation = kerMIT.operation

    dimension = 100  # length of each random vector
    size = 10000     # number of circular-convolution samples
    M = numpy.zeros((dimension, size))
    for i in range(size):
        a = operation.random_vector(dimension)
        b = operation.random_vector(dimension)
        c = operation.circular_convolution(a, b)
        M[:, i] = c  # one sample per column

    # Sample covariance of the convolution components across `size` draws.
    C = numpy.cov(M)
    print(numpy.round(C * dimension, 2))

    # Sum of all elements of the covariance matrix, scaled by dimension.
    print(sum(x for x in numpy.nditer(C)) * dimension)
    # Trace: should be approximately equal to `dimension` (i.e. each
    # component has variance ~ 1/dimension).
    print(numpy.trace(C * dimension))

    # Per-component variances, scaled by dimension.
    print(numpy.round(numpy.diag(C) * dimension, 1))