def test_ssh_hive_creation():
    """Loading a CSV over SSH into a new Hive table yields a populated sa.Table."""
    uri = 'hive://hdfs@%s:10000/default::ssh_1' % host
    # Create the table *before* entering the try block: if into() raised
    # inside it, `t` would be unbound and the finally clause's drop(t)
    # would raise UnboundLocalError, masking the original failure.
    t = into(uri, ssh_csv)
    try:
        assert isinstance(t, sa.Table)
        assert len(into(list, t)) > 0
    finally:
        drop(t)
def test_ssh_directory_hive_creation():
    """Loading a remote directory of CSVs builds a Hive table with the expected schema."""
    uri = 'hive://hdfs@%s:10000/default::ssh_2' % host
    # Create the table *before* entering the try block: if into() raised
    # inside it, `t` would be unbound and the finally clause's drop(t)
    # would raise UnboundLocalError, masking the original failure.
    t = into(uri, ssh_directory)
    try:
        assert isinstance(t, sa.Table)
        assert discover(t) == ds
        assert len(into(list, t)) > 0
    finally:
        drop(t)
def test_drop_group():
    """Dropping an HDF5 group removes its key from the containing file."""
    with tmpfile('.hdf5') as fn:
        hfile = h5py.File(fn)
        try:
            # First axis unbounded so the dataset is resizable.
            grow_shape = (None,) + x.shape[1:]
            hfile.create_dataset('/group/data', data=x, chunks=True,
                                 maxshape=grow_shape)
            drop(hfile['/group'])
            assert '/group' not in hfile.keys()
        finally:
            hfile.close()
def test_drop_dataset():
    """Dropping an HDF5 dataset removes its key from the containing file."""
    with tmpfile('.hdf5') as fn:
        hfile = h5py.File(fn)
        try:
            # First axis unbounded so the dataset is resizable.
            resizable = (None,) + x.shape[1:]
            dset = hfile.create_dataset('/data', data=x, chunks=True,
                                        maxshape=resizable)
            drop(dset)
            assert '/data' not in hfile.keys()
        finally:
            # drop() may have already invalidated the handle; close best-effort.
            with ignoring(Exception):
                hfile.close()
def test_ssh_hive_creation_with_full_urls():
    """Loading the same remote CSV twice appends rows, doubling the count."""
    uri = 'hive://hdfs@%s:10000/default::ssh_3' % host
    # Create the table *before* entering the try block: if into() raised
    # inside it, `t` would be unbound and the finally clause's drop(t)
    # would raise UnboundLocalError, masking the original failure.
    t = into(uri, 'ssh://ubuntu@%s:accounts.csv' % host,
             key_filename=os.path.expanduser('~/.ssh/cdh_testing.key'))
    try:
        assert isinstance(t, sa.Table)
        n = len(into(list, t))
        assert n > 0

        # Load it again
        into(t, 'ssh://ubuntu@%s:accounts.csv' % host,
             key_filename=os.path.expanduser('~/.ssh/cdh_testing.key'))

        # Doubles length
        assert len(into(list, t)) == 2 * n
    finally:
        drop(t)
def test_drop(sql):
    """drop() deletes an existing SQL table so it no longer exists."""
    engine = sql.bind
    assert sql.exists(engine)
    drop(sql)
    assert not sql.exists(engine)
def test_drop_file():
    """drop() on a file object removes the backing file from disk."""
    with file(x) as (path, handle, _contents):
        drop(handle)
        assert not os.path.exists(path)