Example #1
def save_load_benchmark():
    from relstorage.cache import LocalClientBucket
    from io import BytesIO

    bucket = LocalClientBucket(10 * 1024 * 1024)

    i = 1
    j = 0
    while bucket.size < bucket.limit - i:
        val = (str(j) * i).encode('ascii')
        if len(val) > bucket.limit or bucket.size + len(val) > bucket.limit:
            break
        bucket[str(i)] = val
        # Grow the value size in big steps at first, then in small steps near the limit.
        if i < 1096:
            i += 50
        else:
            i += 1
        j += 1
        print("Len", len(bucket), "size", bucket.size, "i", i)

    print("Len", len(bucket), "size", bucket.size)
    number = 50
    import timeit
    import statistics
    import cProfile
    import pstats

    def write():
        # Serialize to a fresh in-memory buffer on each timed run.
        buf = BytesIO()
        bucket.write_to_file(buf)

    bio = BytesIO()
    bucket.write_to_file(bio)

    def load():
        # Rebuild a fresh bucket from the pre-serialized buffer.
        bio.seek(0)
        b2 = LocalClientBucket(bucket.limit)
        b2.load_from_file(bio)

    write_timer = timeit.Timer(write)
    write_times = write_timer.repeat(number=number)

    print("write average", statistics.mean(write_times), "stddev", statistics.stdev(write_times))

    #pr = cProfile.Profile()
    #pr.enable()

    read_timer = timeit.Timer(load)
    read_times = read_timer.repeat(number=number)
    #pr.disable()
    #ps = pstats.Stats(pr).sort_stats('cumulative')
    #ps.print_stats()

    print("read average", statistics.mean(read_times), "stddev", statistics.stdev(read_times))
Example #2
def load():
    # bio, LocalClientBucket, and bucket come from the enclosing
    # benchmark scope (see Example #1).
    bio.seek(0)
    b2 = LocalClientBucket(bucket.limit)
    b2.load_from_file(bio)
Example #3
def load():
    # cache_options, cache_pfx, and bucket come from the enclosing
    # benchmark scope (see Example #4).
    b2 = LocalClientBucket(bucket.limit)
    _Loader.load_local_cache(cache_options, cache_pfx, b2)
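
This fragment closes over names defined in the surrounding benchmark (Example #4 below). A self-contained sketch of that setup, with a hypothetical minimal MockOptions standing in for the test module's options stub:

from relstorage.cache import LocalClientBucket, _Loader

class MockOptions(object):
    # Hypothetical minimal stand-in for the test module's options stub;
    # only the attributes the benchmark assigns are modeled here.
    cache_local_dir = '/tmp'
    cache_local_dir_compress = False

cache_pfx = "pfx"
cache_options = MockOptions()
bucket = LocalClientBucket(10 * 1024 * 1024)

# Save first so there is something on disk to load back.
fname = _Loader.save_local_cache(cache_options, cache_pfx, bucket)

b2 = LocalClientBucket(bucket.limit)
_Loader.load_local_cache(cache_options, cache_pfx, b2)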
Example #4
def save_load_benchmark():
    from relstorage.cache import LocalClientBucket, _Loader
    from io import BytesIO
    import os
    import itertools

    import sys
    # Pickling a very large cache can recurse deeply; raise the limit preemptively.
    sys.setrecursionlimit(500000)
    bucket = LocalClientBucket(500 * 1024 * 1024)
    print("Testing", type(bucket._dict))

    size_dists = [100] * 800 + [300] * 500 + [1024] * 300 + [2048] * 200 + [4096] * 150

    # Random, incompressible payloads in the sizes above (/dev/urandom is POSIX-only).
    with open('/dev/urandom', 'rb') as rnd:
        data = [rnd.read(x) for x in size_dists]
    data_iter = itertools.cycle(data)

    for j, datum in enumerate(data_iter):
        if len(datum) > bucket.limit or bucket.size + len(datum) > bucket.limit:
            break
        # To ensure the pickle memo cache doesn't just write out "use object X",
        # but distinct copies of the strings, we need to copy them
        bucket[str(j)] = datum[:-1] + b'x'
        assert bucket[str(j)] is not datum
        #print("Len", len(bucket), "size", bucket.size, "dlen", len(datum))

    print("Len", len(bucket), "size", bucket.size)
    number = 1
    import timeit
    import statistics
    import cProfile
    import pstats

    cache_pfx = "pfx"
    cache_options = MockOptions()  # options stub defined elsewhere in the test module
    cache_options.cache_local_dir = '/tmp'
    cache_options.cache_local_dir_compress = False

    def write():
        fname = _Loader.save_local_cache(cache_options, cache_pfx, bucket)
        os.remove(fname)  # clean up so repeated runs don't accumulate files

    def load():
        b2 = LocalClientBucket(bucket.limit)
        _Loader.load_local_cache(cache_options, cache_pfx, b2)

    #write_timer = timeit.Timer(write)
    #write_times = write_timer.repeat(number=number)
    #print("write average", statistics.mean(write_times), "stddev", statistics.stdev(write_times))

    #read_timer = timeit.Timer(load)
    #read_times = read_timer.repeat(number=number)
    #print("read average", statistics.mean(read_times), "stddev", statistics.stdev(read_times))

    #pr = cProfile.Profile()
    #pr.enable()

    fname = _Loader.save_local_cache(cache_options, cache_pfx, bucket)
    print("Saved to", fname)
    #pr.disable()
    #ps = pstats.Stats(pr).sort_stats('cumulative')
    #ps.print_stats()
    #return

    pr = cProfile.Profile()
    pr.enable()
    _Loader.load_local_cache(cache_options, cache_pfx,
                             LocalClientBucket(bucket.limit))
    pr.disable()
    ps = pstats.Stats(pr).sort_stats('cumulative')
    ps.print_stats(0.4)  # restrict output to the top 40% of entries
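
The datum[:-1] + b'x' copy in Example #4 exists because pickle memoizes objects by identity: pickling the same object twice writes it once and then a short back-reference, which would make the serialized cache unrealistically small. A quick standard-library illustration of that effect:

import pickle

s = b'x' * 1000
shared = pickle.dumps([s, s])              # second element becomes a memo back-reference
copied = pickle.dumps([s, s[:-1] + b'x'])  # equal bytes, distinct object: stored in full
assert len(copied) > len(shared)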