def test_modification_time_open_files():
    # Opening the same unmodified files twice should yield identical cache
    # keys; rewriting the data (doubling each payload) should change them.
    with s3_context('compress', files) as s3:
        a = open_files('compress/test/accounts.*', s3=s3)
        b = open_files('compress/test/accounts.*', s3=s3)
        assert [aa._key for aa in a] == [bb._key for bb in b]

    with s3_context('compress', valmap(double, files)) as s3:
        c = open_files('compress/test/accounts.*', s3=s3)
        assert [aa._key for aa in a] != [cc._key for cc in c]
def test_registered_open_files(s3):
    from dask.bytes.core import open_files
    myfiles = open_files('s3://' + test_bucket_name + '/test/accounts.*.json',
                         s3=s3)
    assert len(myfiles) == len(files)
    data = compute(*[file.read() for file in myfiles])
    assert list(data) == [files[k] for k in sorted(files)]
def test_files(s3):
    # Globbing should match every uploaded account file, and reading them back
    # should reproduce the original payloads in sorted-key order.
    myfiles = open_files(test_bucket_name + '/test/accounts.*', s3=s3)
    assert len(myfiles) == len(files)
    data = compute(*[file.read() for file in myfiles])
    assert list(data) == [files[k] for k in sorted(files)]
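# The tests above assume module-level fixtures that are not shown in this
# excerpt. The sketch below is a hedged reconstruction under stated
# assumptions; ``test_bucket_name``, ``files``, ``double`` and ``s3_context``
# are hypothetical stand-ins, not the project's actual conftest.
from contextlib import contextmanager

import s3fs
from toolz import valmap                 # noqa: F401  (used above)
from dask import compute                 # noqa: F401  (used above)
from dask.bytes.core import open_files   # noqa: F401  (used above)

test_bucket_name = 'test'  # assumed bucket name

# Assumed shape of the sample data: newline-delimited JSON payloads keyed by
# object name within the bucket.
files = {'test/accounts.1.json': b'{"amount": 100, "name": "Alice"}\n',
         'test/accounts.2.json': b'{"amount": 200, "name": "Bob"}\n'}


def double(data):
    # Rewriting each object with doubled content changes its modification
    # time, which is what test_modification_time_open_files relies on.
    return data * 2


@contextmanager
def s3_context(bucket, files):
    # Hypothetical helper: upload ``files`` into ``bucket`` (a real suite
    # would target a mocked S3, e.g. via moto), yield the filesystem, then
    # clean up the uploaded objects.
    s3 = s3fs.S3FileSystem(anon=False)
    for name, data in files.items():
        with s3.open('%s/%s' % (bucket, name), 'wb') as f:
            f.write(data)
    try:
        yield s3
    finally:
        for name in files:
            s3.rm('%s/%s' % (bucket, name))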