def test_together(self):
    default = [[1], [2]]
    b = Buckets(5, default)
    bc = BucketsCorrected(5, default)
    default[-1].append(3)
    default.append([3])
    b.clear(1)
    bc.clear(1)
    self.assertTrue(bc.default == bc.buckets[1] != default)
    self.assertTrue(b.default == b.buckets[1] == default)
def __init__(self):
    self.ter = {}        # name -> physical ter
    self.mesh = {}       # tile -> [patches] where patch=(texture,f,v,t)
    self.nets = {}       # tile -> [(type, [points])]
    self.elevation = {}  # ElevationMesh
    self.currenttile = None
    self.meshcache = []  # [indices] of current tile
    self.netcache = None # networks in current tile
    self.lasttri = None  # take advantage of locality of reference
    self.texcache = TexCache()
    self.instance_data = empty((0, 5), float32)  # Copy of vbo data
    self.instance_pending = []  # Clutter not yet allocated into vbo
    self.instance_count = 0     # Allocated and pending vertices
    self.instance_valid = False
    self.vector_data = empty((0, 6), float32)    # Copy of vbo data
    self.vector_pending = []    # Vector data not yet allocated into vbo
    self.vector_count = 0       # Allocated and pending vertices
    self.vector_indices = empty((0,), uint32)    # Copy of vbo indices
    self.vector_indices_pending = []  # Vector indices not yet allocated into vbo
    self.vector_indices_count = 0     # Allocated and pending indices
    self.vector_valid = False
    self.dynamic_data = empty((0, 6), float32)
    self.dynamic_pending = set()
    self.dynamic_valid = False
    self.buckets = Buckets(self)
    self.dsfdirs = None  # [custom, global, default]
def test_compare(self):
    default = [[1], [2], [3], 4, 5]
    b = Buckets(5, default)
    bc = Buckets_corrected(5, default)
    default[0].append(6)
    default.pop(4)
    self.assertTrue(b.buckets[0] == b.default == default)
    self.assertFalse(bc.buckets[0] == bc.default == default)
def test_buckets(self):
    default = [[1], [2]]
    b = Buckets(5, default)
    # FAILED
    self.assertNotEqual(id(default), id(b.default))
    default[-1].append(3)
    # OK
    self.assertTrue(b.find(3, [2, 3]))
    default.append([3])
    b.clear(3)
    # FAILED
    self.assertFalse(b.find(3, [3]))
def flush(self):
    # invalidate array indices
    self.currenttile = None
    self.meshcache = []
    self.netcache = None
    self.lasttri = None
    self.instance_data = empty((0, 5), float32)
    self.instance_pending = []
    self.instance_valid = False
    self.instance_count = 0
    self.vector_data = empty((0, 6), float32)
    self.vector_pending = []
    self.vector_count = 0
    self.vector_indices = empty((0,), uint32)  # Copy of vbo indices
    self.vector_indices_pending = []  # Vector indices not yet allocated into vbo
    self.vector_indices_count = 0
    self.vector_valid = False
    self.dynamic_data = empty((0, 6), float32)
    self.dynamic_pending = set()
    self.dynamic_valid = False
    self.buckets = Buckets(self)
def realize_dynamic(self, dynamic_vbo):
    # Allocate into VBO if required. Returns True if VBO updated.
    if not self.dynamic_valid:
        if __debug__: clock = time.clock()
        self.buckets = Buckets(self)  # reset
        data = []
        dynamic_count = 0
        for placement in self.dynamic_pending:
            thisdata = placement.bucket_dynamic(dynamic_count, self.buckets)
            dynamic_count += len(thisdata) / 6
            data.append(thisdata)
        if data:
            self.dynamic_data = concatenate(data)
        else:
            self.dynamic_data = empty((0,), float32)
        self.dynamic_valid = True
        dynamic_vbo.set_array(self.dynamic_data)
        if __debug__:
            print "%6.3f time to realize dynamic VBO, size %dK" % (time.clock() - clock, self.dynamic_data.size / 256)
        return True
    else:
        return False
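# Hypothetical caller of realize_dynamic() above, to make the dirty-flag
# protocol explicit. `vcache`, `placement` and `dynamic_vbo` are illustrative
# names, not from this file; `dynamic_vbo` is assumed to expose set_array()
# in the style of OpenGL.arrays.vbo.VBO, and `placement` to implement
# bucket_dynamic() returning a flat float32 array (6 floats per vertex).
vcache.dynamic_pending.add(placement)    # queue clutter for the next rebuild
vcache.dynamic_valid = False             # mark the cached flat array stale
if vcache.realize_dynamic(dynamic_vbo):  # rebuilds + uploads only when stale
    pass                                 # dynamic_data now mirrors the VBO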
def test_incorrect_buckets(self):
    default = [1, 2, 3, 4]
    b = Buckets(5, default)
    default.append(5)
    self.assertTrue(b.find(4, [1, 2, 3, 4, 5]))
def test_buckets(self):
    # FAILED
    try:
        b = Buckets()    # no-arg construction raises TypeError
    except TypeError:    # handler restored; lost when the snippet was flattened
        pass
    default = [[1], [2]]
    b = Buckets(5, default)
    # FAILED
    self.assertNotEqual(id(default), id(b.default))
    default[-1].append(3)
    # OK
    self.assertTrue(b.find(3, [2, 3]))
    default.append([3])
    b.clear(3)
    # FAILED
    self.assertFalse(b.find(3, [3]))
    item = 125
    b.add(0, item)
    item += 1
    # FAILED
    self.assertTrue(b.find(0, item))
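# The Buckets tests above only make sense against an implementation that
# stores the caller's `default` list by reference. What follows is a minimal
# sketch reconstructed from the FAILED/OK annotations, not the real class
# (which is also unrelated to the one-argument Buckets(self) used by the
# vertex-cache code above). The snippets name the fixed class inconsistently
# (BucketsCorrected / Buckets_corrected); the sketch picks one. find() here
# takes the loosest reading consistent with all assertions: it matches either
# an element of the bucket (test_buckets) or the whole bucket contents
# (test_incorrect_buckets).
import copy

class Buckets(object):
    def __init__(self, n, default):
        self.default = default        # BUG: aliases the caller's list
        self.buckets = [default] * n  # BUG: n references to one object

    def add(self, i, item):
        self.buckets[i].append(item)

    def find(self, i, value):
        return value in self.buckets[i] or value == self.buckets[i]

    def clear(self, i):
        self.buckets[i] = self.default  # BUG: still aliased after clearing

class BucketsCorrected(Buckets):
    # Deep-copies `default`, so later mutation of the caller's list cannot
    # leak into the buckets -- the behaviour test_together expects of `bc`.
    def __init__(self, n, default):
        self.default = copy.deepcopy(default)
        self.buckets = [copy.deepcopy(default) for _ in range(n)]

    def clear(self, i):
        self.buckets[i] = copy.deepcopy(self.default)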
def bcalm(input_filename, output_filename, k, m):
    input_file = open(input_filename)
    simple_buckets = False
    if simple_buckets:
        minimizers = sorted(list(set(map(lambda s: minimizer("".join(s), m),
                                         product("acgt", repeat=m)))))
        buckets = Buckets(minimizers, output_filename)
    else:
        m *= 2
        minimizers = sorted(list(set(map(lambda s: minimizer("".join(s), m),
                                         product("acgt", repeat=m)))))
        buckets = Superbuckets(minimizers, output_filename)
    precompute_hashes(m)
    # partition k-mers
    for line in input_file:
        kmer = line.strip()[:-1]
        bucket_minimizer = minimizer(kmer, m)
        buckets.put(kmer, bucket_minimizer)
    buckets.flush()
    buckets.stats()
    # process each bucket in minimizer order
    for bucket_file, bucket_minimizer in buckets.iterate():
        G = Graph(k)
        buckets.flush()
        G.importg(bucket_file)
        G.debruijn()
        G.compress(bucket_minimizer, m)
        for node in G.nodes.values():
            if use_tags:
                node = untag(node)
            min = minbutbiggerthan(node[:k - 1], node[-(k - 1):],
                                   bucket_minimizer, m)
            buckets.put(node, min)
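# bcalm() relies on a minimizer() helper that is not shown here. In the BCALM
# paper the minimizer of a k-mer is its lexicographically smallest substring
# of length m; a plausible sketch under that assumption (ignoring whatever
# precompute_hashes(), also not shown, caches):
def minimizer(kmer, m):
    # Smallest of the len(kmer)-m+1 windows of width m, in lexicographic order.
    return min(kmer[i:i + m] for i in range(len(kmer) - m + 1))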
#!/usr/bin/python
import csv
from tweet import Tweet
from buckets import Buckets
from modelBuilder import ModelBuilder
from classifier import Classifier
import time
from multiprocessing import Process, Queue
import sys

# make it into buckets
buckets = Buckets(10)
modelSettings = {}
modelSettings['useMentions'] = 1
modelSettings['useLinks'] = 1
modelBuilder = ModelBuilder()
stopwords = []
startTime = time.time()
if len(sys.argv) > 1:
    stopnum = sys.argv[1]
else:
    stopnum = 1
if len(sys.argv) > 2:
    tweetFileName = 'inputData/' + sys.argv[2] + '.csv'