_s = time.clock() for i in xrange(gruns): gpu_func() gpu_speed = (time.clock() - _s) / gruns _s = time.clock() cpu_func() cpu_speed = (time.clock() - _s) print 'CPU speed: %.3f' % (cpu_speed * 1000) print 'GPU speed: %.3f' % (gpu_speed * 1000) print cpu_speed / gpu_speed if __name__ == '__main__': testmod.set_device(0) n = 1e3 k = 16 data = randn(n, k).astype(np.float32) mean = randn(k) cov = np.array(util.random_cov(k), dtype=np.float32) j = 32 padded_data = util.pad_data(data) chol_sigma = chol(cov) ichol_sigma = L.inv(chol_sigma) logdet = np.log(np.linalg.det(cov))
def acquire_device(self):
    """Make this worker's configured GPU (``self.device``) the active device."""
    device_id = self.device
    testmod.set_device(device_id)