def test_hdfs_file_exists():
    """Save an RDD to a random HDFS path; that path must exist, a second random one must not."""
    random.seed()
    # int(...) is required: the ':d' format spec raises ValueError for a float
    # (the original passed random.random() * 999999.0 directly).
    fn1 = f'{HDFS_TEST_PATH}/pysparkling_test_{int(random.random() * 999999.0):d}.txt'
    fn2 = f'{HDFS_TEST_PATH}/pysparkling_test_{int(random.random() * 999999.0):d}.txt'
    rdd = Context().parallelize(f'Hello World {x}' for x in range(10))
    rdd.saveAsTextFile(fn1)
    assert File(fn1).exists() and not File(fn2).exists()
def test_hdfs_file_exists():
    """Save an RDD to a random HDFS path; that path must exist, a second random one must not."""
    random.seed()
    # int(...) is required: '{:d}' raises ValueError when given a float
    # (the original passed random.random() * 999999.0 directly).
    fn1 = '{}/pysparkling_test_{:d}.txt'.format(
        HDFS_TEST_PATH, int(random.random() * 999999.0))
    fn2 = '{}/pysparkling_test_{:d}.txt'.format(
        HDFS_TEST_PATH, int(random.random() * 999999.0))
    rdd = Context().parallelize('Hello World {0}'.format(x) for x in range(10))
    rdd.saveAsTextFile(fn1)
    assert File(fn1).exists() and not File(fn2).exists()
def test_hdfs_file_exists():
    """Write an RDD to one randomly-named HDFS path and check that only that path exists."""
    # Skip when no HDFS test location is configured for this environment.
    if not HDFS_TEST_PATH:
        raise SkipTest
    random.seed()

    def make_path():
        # Build a unique target file name under the configured test prefix.
        suffix = int(random.random() * 999999.0)
        return HDFS_TEST_PATH + '/pysparkling_test_{0}.txt'.format(suffix)

    written, unwritten = make_path(), make_path()
    lines = ('Hello World {0}'.format(n) for n in range(10))
    Context().parallelize(lines).saveAsTextFile(written)
    assert File(written).exists() and not File(unwritten).exists()
def test_dumpToFile():
    """Dump a pickled dict to a randomly-named S3 path."""
    # Skip unless S3 credentials and a test location are configured.
    if not AWS_ACCESS_KEY_ID or not S3_TEST_PATH:
        raise SkipTest
    random.seed()
    # The original mixed automatic '{}' and manual '{0}' field numbering in one
    # format string, which raises ValueError; use automatic numbering for both.
    fn = '{}/pysparkling_test_{}.pickle'.format(
        S3_TEST_PATH, int(random.random() * 999999.0))
    File(fn).dump(pickle.dumps({'hello': 'world'}))
def test_dumpToFile():
    """Dump a pickled dict to a randomly-named S3 path."""
    random.seed()
    # int(...) is required: the ':d' format spec raises ValueError for a float
    # (the original passed random.random() * 999999.0 directly).
    fn = f'{S3_TEST_PATH}/pysparkling_test_{int(random.random() * 999999.0):d}.pickle'
    File(fn).dump(pickle.dumps({'hello': 'world'}))
def test_dumpToFile():
    """Dump a pickled dict to a randomly-named S3 path."""
    random.seed()
    # int(...) is required: '{:d}' raises ValueError when given a float
    # (the original passed random.random() * 999999.0 directly).
    fn = '{}/pysparkling_test_{:d}.pickle'.format(
        S3_TEST_PATH, int(random.random() * 999999.0))
    File(fn).dump(pickle.dumps({'hello': 'world'}))