def k_nearest_timing_test(d=2, k=1, tests=[VectorTree]):
    from plastk.rand import uniform, seed
    from plastk.utils import time_call
    import time
    new_seed = time.time()
    for e in range(3, 20):
        print '==================='
        for db_type in tests:
            print
            print "Testing", db_type
            db = db_type(vector_len=d)
            n = 2**e
            # Reseed identically for each db_type so all stores see
            # the same sequence of points.
            seed(int(new_seed), int(new_seed % 1 * 1000000))
            print "Adding", n, "data points....",
            total_add_time = 0.0
            for i in range(n):
                x = uniform(0, 1, (d,))
                start = time.clock()
                db.add(x, None)
                end = time.clock()
                total_add_time += end - start
            print "done. Average add time = %4.3f ms." % ((total_add_time/n) * 1000)
            print "Average search time...",
            seed(0, 0)
            print '%6.3f ms' % (1000 * time_call(100, lambda: db.k_nearest(uniform(0, 1, (d,)), k)))
def radius_timing_test(d=2, radius=0.1, tests=[VectorTree]):
    from plastk.rand import uniform, seed
    import time
    for e in range(3, 20):
        for db_type in tests:
            print
            print "Testing", db_type
            db = db_type(vector_len=d)
            n = 2**e
            print "Adding", n, "data points....",
            for i in range(n):
                x = uniform(0, 1, (d,))
                db.add(x, None)
            print "done."
            print "Average search time...",
            seed(0, 0)
            start = time.clock()
            total_results = 0
            for i in range(100):
                results, dists = db.find_in_radius(uniform(0, 1, (d,)), radius)
                total_results += len(results)
            end = time.clock()
            print (end - start) / 100
            print "Average results size:", total_results / 100.0
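Both benchmarks can be driven against any of the vector-database classes in this module. A minimal driver, offered only as a sketch (the dimension and parameter choices here are illustrative, not from the original suite), might be:

# Hypothetical driver: pit the brute-force store against the
# tree-backed stores on identical workloads.
if __name__ == '__main__':
    k_nearest_timing_test(d=3, k=5, tests=[FlatVectorDB, KDTree, VectorTree])
    radius_timing_test(d=3, radius=0.2, tests=[FlatVectorDB, KDTree, VectorTree])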
def setUp(self):
    self.training_data = [rand.uniform(-1, 1, self.num_inputs)
                          for x in range(self.training_size)]
    self.test_data = [rand.uniform(-1, 1, self.num_inputs)
                      for x in range(self._test_size)]
    self.W = rand.uniform(-1, 1, (self.num_outputs, self.num_inputs))
def __init__(self, **params):
    from plastk.rand import uniform
    from Numeric import zeros
    super(GNG, self).__init__(**params)
    N = self.initial_num_units
    self.weights = uniform(self.rmin, self.rmax, (N, self.dim))
    # Multiplying by 0.0 forces float arrays under Numeric.
    self.dists = zeros((N, 1)) * 0.0
    self.error = zeros((N, 1)) * 0.0
    self.connections = [{} for i in range(N)]
    self.last_input = zeros(self.dim)
    self.count = 0
    if self.initial_connections_per_unit > 0:
        # Wire each initial unit to its nearest neighbors among the
        # initial weight vectors.
        for w in self.weights:
            self.present_input(w)
            ww = self.winners(self.initial_connections_per_unit + 1)
            i = ww[0]
            for j in ww[1:]:
                self.add_connection(i, j)
    self.nopickle += ['_activation_fn']
    self.unpickle()
def ranseq(x):
    """
    Generator that gives a random-length sequence of the integers
    ascending from 0.  The length is selected from the uniform
    distribution over the range [0,x).
    """
    for i in range(int(rand.uniform(0, x))):
        yield i
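As a quick illustration of the contract (the sequence length varies run to run, but the contents are always the ascending integers from 0), a doctest-style check:

>>> seq = list(ranseq(5))        # length drawn uniformly from [0, 5)
>>> 0 <= len(seq) < 5
True
>>> seq == range(len(seq))       # always 0, 1, 2, ... in order
True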
def testall(n=1000, d=2):
    from plastk.rand import uniform
    vt = VectorTree()
    kd = KDTree()
    flat = FlatVectorDB()
    for i in range(n):
        # Add the same random points to all three stores so their
        # query results can be compared.
        v = uniform(0, 1, (d,))
        flat.add(v, None)
        kd.add(v, None)
        vt.add(v, None)
    return flat, kd, vt
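Because testall loads identical points into all three stores, their query answers should agree. A hedged consistency check, assuming the k_nearest interface used in the timing test above:

from plastk.rand import uniform
flat, kd, vt = testall(n=1000, d=2)
q = uniform(0, 1, (2,))
# Same points in every store, so a 1-nearest-neighbor query should
# return the same vector from each (up to distance ties).
print flat.k_nearest(q, 1)
print kd.k_nearest(q, 1)
print vt.k_nearest(q, 1)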
def _combine(self, q, Xs, Ys, weights):
    q = array(q)
    X = array(Xs)
    rows, cols = X.shape
    if rows < cols:
        # Too few neighbors for a full-rank regression.
        self.verbose("Falling back to weighted averaging.")
        return weighted_average(Ys, weights)
    Y = array(Ys)
    W = Numeric.identity(len(weights)) * weights
    Z = mult(W, X)
    v = mult(W, Y)
    if self.ridge_range:
        # Append a random diagonal ridge term to regularize the system.
        ridge = Numeric.identity(cols) * rand.uniform(0, self.ridge_range, (cols, 1))
        Z = join((Z, ridge))
        v = join((v, Numeric.zeros((cols, 1))))
    B, residuals, rank, s = linear_least_squares(Z, v)
    if len(residuals) == 0:
        self.verbose("Falling back to weighted averaging.")
        return weighted_average(Ys, weights)
    estimate = mult(q, B)
    # We estimate the variance as the residuals over the sum of the
    # squared weights.
    variance = residuals / sum(weights**2)
    stderr = Numeric.sqrt(variance) / Numeric.sqrt(sum(weights))
    return estimate, stderr
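_combine fits a locally weighted linear model: it solves min_B ||W(XB - Y)||^2 and evaluates the fit at the query q. A self-contained sketch of that core computation using numpy in place of the Numeric/mult/join helpers (and omitting the averaging fallback and ridge paths) could look like this:

import numpy as np

def weighted_lstsq_combine(q, Xs, Ys, weights):
    # Hypothetical numpy rendering of _combine's core step.
    X = np.asarray(Xs, dtype=float)       # (n, d) neighbor inputs
    Y = np.asarray(Ys, dtype=float)       # (n, m) neighbor outputs
    w = np.asarray(weights, dtype=float)  # (n,)  kernel weights
    W = np.diag(w)
    # Solve the weighted least-squares problem min_B ||W(XB - Y)||^2.
    B, residuals, rank, s = np.linalg.lstsq(np.dot(W, X), np.dot(W, Y),
                                            rcond=None)
    estimate = np.dot(np.asarray(q, dtype=float), B)
    # Variance estimated as residuals over the sum of squared weights.
    variance = residuals / np.sum(w ** 2)
    stderr = np.sqrt(variance) / np.sqrt(np.sum(w))
    return estimate, stderr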
def setUp(self):
    rand.seed(0, 0)
    self.data = [(rand.uniform(0, 1, (self.dim,)), None)
                 for i in range(self.N)]
    for x, y in self.data:
        self.db.add(x, y)
def testvt(n=1000):
    from plastk.rand import uniform
    db = VectorTree()
    for i in range(n):
        db.add(uniform(0, 1, (2,)), None)
    return db
def testflat(n=1000):
    from plastk.rand import uniform
    db = FlatVectorDB()
    for i in range(n):
        db.add(uniform(0, 1, (2,)), None)
    return db
def __init__(self, **params):
    super(LinearFnApprox, self).__init__(**params)
    # self.w = zeros((self.num_outputs, self.num_inputs)) * 1.0
    self.w = rand.uniform(-1, 1, (self.num_outputs, self.num_inputs)) * 1.0