def run():
    # For the stream 'num_per_bucket', timewidth_seconds is 1000 and
    # num_shards is 1, so all of these inserts go into the same bucket. On
    # each iteration we add 200k more events to the same bucket and then read
    # 10k events to see how much slower reading has become. Overall we insert
    # 2m events in batches of 200k each.
    for n in xrange(200000, 2200000, 200000):
        insert('num_per_bucket', 200000)
        timeit('n=%s' % n, read, 'num_per_bucket', 10000, start=500)
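# These benchmarks call insert, read, and timeit helpers defined by the
# surrounding harness. The sketch below is an assumed, minimal in-memory
# stand-in inferred from the call sites above; the real helpers talk to the
# storage backend, and their exact signatures may differ.

import time

_streams = {}

def insert(stream, n):
    # Assumed helper: append n placeholder events to the named stream.
    _streams.setdefault(stream, []).extend([None] * n)

def read(stream, n, start=0):
    # Assumed helper: materialize n events from the named stream, skipping
    # the first `start` events.
    return _streams.get(stream, [])[start:start + n]

def timeit(label, fn, *args, **kwargs):
    # Run fn once with the given arguments and print the elapsed wall-clock
    # time, tagged with `label`.
    t0 = time.time()
    fn(*args, **kwargs)
    print '%s: %.3f seconds' % (label, time.time() - t0)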
def run():
    # Benchmark reads against streams named for different read_size settings:
    # insert 100k events into each stream and time reading them all back.
    for n in xrange(5000, 30000, 5000):
        stream = "read_size_%d" % n
        insert(stream, 100000)
        timeit("read_size=%d" % n, read, stream, 100000)
def run():
    # Benchmark how read performance scales with the number of shards. For
    # each shard count we insert 200k events and time reading 150k back.
    for n in (1, 3, 6, 12, 24):
        stream = 'shards_%s' % n
        insert(stream, 200000)
        timeit('shards=%s' % n, read, stream, 150000)