Example #1
def test_concurrent_upload(metasync, opts):

    def _put(srv, path, remote_path):
        with open(path, "rb") as f:
            srv.put(remote_path, f.read())

    # bump files
    tmpdir = os.path.join(opts.tmpdir, "metasync-files")
    sizes  = [1024, 2048, 4192, 8192, 1*MB]
    files  = []
    total_size = 1*MB

    print tmpdir

    util.mkdirs(tmpdir)
    for size in sizes:
        count = total_size / size
        fl = []
        for i in range(count):
            fn = "file-%s-%s" % (size, i)
            pn = os.path.join(tmpdir, fn)
            if not os.path.exists(pn):
                util.create_random_file(pn, size)
            fl.append(fn)
        files.append(fl)

    from metasyncAPI import Worker, ThreadPool
    from multiprocessing import cpu_count

    pool = ThreadPool(cpu_count())

    # try uploading each file
    result = [["Services"] + files]
    for cls in services.all_services:
        if cls in [services.DiskAPI]:
            continue
        row = [services.slug(cls)]
        srv = cls()
        if srv.exists('/concurrent_upload'):
            srv.rmdir('/concurrent_upload')
        srv.putdir('/concurrent_upload')
        print 'uploading:', row[0]

        for fl in files:
            beg = time.time()
            for f in fl:
                path = os.path.join(tmpdir, f)
                remote_path = '/concurrent_upload/%s' % f
                pool.submit(srv.copy, _put, path, remote_path)
            pool.join()
            end = time.time()
            row.append(end - beg)

        result.append(row)

    # tabularize
    for row in result:
        for e in row:
            print "%s\t" % e,
        print
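The example above leans on metasyncAPI's ThreadPool, whose implementation is not shown; judging from the calls, submit() queues a task (here with srv.copy as the first argument, presumably so each task can operate on its own copy of the service handle) and join() blocks until the queue drains. Purely to illustrate that submit/join pattern, the following is a minimal stand-in built on the standard library; it is a sketch of the assumed interface, not the project's actual class, and it omits whatever per-service copying the real pool performs.

import threading
try:
    import Queue as queue      # Python 2, matching the examples above
except ImportError:
    import queue               # Python 3

class SimpleThreadPool(object):
    """Hypothetical stand-in for metasyncAPI.ThreadPool: submit() enqueues a
    callable with its arguments, join() waits for all queued work to finish."""
    def __init__(self, nworkers):
        self._tasks = queue.Queue()
        for _ in range(nworkers):
            t = threading.Thread(target=self._worker)
            t.daemon = True
            t.start()

    def _worker(self):
        while True:
            fn, args = self._tasks.get()
            try:
                fn(*args)
            finally:
                self._tasks.task_done()

    def submit(self, fn, *args):
        self._tasks.put((fn, args))

    def join(self):
        self._tasks.join()

With this stand-in, pool.submit(_put, srv, path, remote_path) followed by pool.join() would reproduce the timed batch above, minus the per-task service copy handled by the real ThreadPool.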
Example #2
def new_index(srv, folder, prefix):
    if services.slug(srv) == 'onedrive':
        folder = '/Public' + folder
    if not srv.exists(folder):
        return 0
    files = srv.listdir(folder)
    cnt = 0
    for fn in files:
        if fn.startswith(prefix):
            cnt += 1
    return cnt
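new_index simply counts how many entries in folder already carry the prefix (with OneDrive's public folder handled specially), so its return value can serve as the next free numeric suffix when objects are named prefix0, prefix1, and so on. A small hypothetical helper to make that intended use concrete; next_object_name and the prefix-plus-integer naming scheme are assumptions, not part of the original code.

def next_object_name(srv, folder, prefix):
    # Hypothetical helper: if existing objects are named prefix0, prefix1, ...,
    # the count returned by new_index is also the next free index.
    return '%s%d' % (prefix, new_index(srv, folder, prefix))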
Example #3
def test_bench_upload(metasync, opts):
    "bencmark upload speed of storage services"

    # bump files
    tmpdir = os.path.join(opts.tmpdir, "metasync-files")
    sizes = [1024, 2048, 1 * MB]
    files = []

    # for real bench
    if opts.slow:
        sizes = [10 * MB, 100 * MB]

    util.mkdirs(tmpdir)
    for size in sizes:
        fn = "file-%s" % size
        pn = os.path.join(tmpdir, fn)
        if not os.path.exists(pn):
            util.create_random_file(pn, size)
        files.append(fn)

    # try uploading each file
    result = [["Services"] + files]
    for cls in services.all_services:
        if cls in [services.DiskAPI]:
            continue
        if opts.slow and cls in [services.BaiduAPI]:
            continue
        row = [services.slug(cls)]
        srv = cls()
        print 'uploading:', row[0]

        if srv.exists('/upload_test'):
            srv.rmdir('/upload_test')
        srv.putdir('/upload_test')

        for f in files:
            #if row[0] == 'baidu' and f == 'file-104857600':
            #    continue
            with open(os.path.join(tmpdir, f), 'rb') as fd:
                content = fd.read()
            beg = time.time()
            srv.put('/upload_test/' + f, content)
            end = time.time()
            row.append(end - beg)

        result.append(row)

    # tabularize
    for row in result:
        for e in row:
            print "%s\t" % e,
        print
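The benchmark depends on two helpers from the project's util module, create_random_file and mkdirs, whose bodies are not shown here. A minimal sketch of what they plausibly do, assuming the only requirements are "produce a file of exactly size random bytes" and "create the directory tree if it is missing":

import os

def create_random_file(pn, size, chunk=1024 * 1024):
    # Sketch of the assumed helper: write `size` pseudo-random bytes to `pn`,
    # chunked so the 10 MB / 100 MB benchmark files never sit in memory whole.
    with open(pn, 'wb') as f:
        remaining = size
        while remaining > 0:
            n = min(chunk, remaining)
            f.write(os.urandom(n))
            remaining -= n

def mkdirs(path):
    # Sketch of the assumed helper: behave like `mkdir -p`.
    if not os.path.isdir(path):
        os.makedirs(path)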
Example #4
def test_bench_download(metasync, opts):
    "bencmark upload speed of storage services"

    # bump files
    sizes = [1024, 2048, 1 * MB]
    files = []

    # for real bench
    if opts.slow:
        sizes = [10 * MB, 100 * MB]

    for size in sizes:
        fn = "file-%s" % size
        files.append(fn)

    # try downloading each file
    result = [["Services"] + files]
    for cls in services.all_services:
        if cls in [services.DiskAPI]:
            continue
        if opts.slow and cls in [services.BaiduAPI]:
            continue
        row = [services.slug(cls)]
        srv = cls()
        print 'downloading:', row[0]

        if not srv.exists('/upload_test'):
            print 'Test files no longer exist in %s' % row[0]
            return

        for f in files:
            #if row[0] == 'baidu' and f == 'file-104857600':
            #    continue
            beg = time.time()
            srv.get('/upload_test/' + f)
            end = time.time()
            row.append(end - beg)

        result.append(row)

    # tabularize
    for row in result:
        for e in row:
            print "%s\t" % e,
        print
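Both benchmarks record raw elapsed seconds per file. Since every data column header encodes the payload size as file-<bytes>, the table can be post-processed into throughput figures; a sketch, assuming MB is the same byte-count constant the benchmarks above already use:

def to_throughput(result):
    # Sketch: convert the raw seconds in a benchmark result table into MB/s.
    # Assumes every data column is named 'file-<bytes>' (as above) and that
    # MB is the byte-count constant used by the benchmarks.
    header = result[0]
    sizes = [int(name.split('-')[1]) for name in header[1:]]
    rows = [header]
    for row in result[1:]:
        rates = [size / float(sec) / MB for size, sec in zip(sizes, row[1:])]
        rows.append([row[0]] + ['%.2f' % r for r in rates])
    return rows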