def test_filesystem_factory(mockfs, paths_or_selector):
    """End-to-end check of ``FileSystemDatasetFactory``.

    Verifies the factory option defaults, the unified inspected schema
    (including a dictionary-encoded column requested via reader options),
    and a full scan over the two-partition mock filesystem tree.
    """
    # Request that the 'str' column be read back dictionary-encoded.
    # (renamed from `format`, which shadowed the builtin)
    file_format = ds.ParquetFileFormat(
        reader_options=dict(dict_columns={"str"}))

    options = ds.FileSystemFactoryOptions('subdir')
    options.partitioning = ds.DirectoryPartitioning(
        pa.schema([pa.field('group', pa.int32()),
                   pa.field('key', pa.string())]))

    # Documented option defaults.
    assert options.partition_base_dir == 'subdir'
    assert options.ignore_prefixes == ['.', '_']
    assert options.exclude_invalid_files is False

    factory = ds.FileSystemDatasetFactory(mockfs, paths_or_selector,
                                          file_format, options)
    inspected_schema = factory.inspect()

    # The unified schema combines the file columns with the partition
    # fields; reuse the already-inspected schema instead of calling
    # inspect() a second time.
    assert inspected_schema.equals(pa.schema([
        pa.field('i64', pa.int64()),
        pa.field('f64', pa.float64()),
        pa.field('str', pa.dictionary(pa.int32(), pa.string())),
        pa.field('group', pa.int32()),
        pa.field('key', pa.string()),
    ]), check_metadata=False)

    assert isinstance(factory.inspect_schemas(), list)
    assert isinstance(factory.finish(inspected_schema),
                      ds.FileSystemDataset)
    assert factory.root_partition.equals(ds.ScalarExpression(True))

    dataset = factory.finish()
    assert isinstance(dataset, ds.FileSystemDataset)
    # One scan task per partition directory.
    assert len(list(dataset.scan())) == 2

    scanner = ds.Scanner(dataset)
    expected_i64 = pa.array([0, 1, 2, 3, 4], type=pa.int64())
    expected_f64 = pa.array([0, 1, 2, 3, 4], type=pa.float64())
    expected_str = pa.DictionaryArray.from_arrays(
        pa.array([0, 1, 2, 3, 4], type=pa.int32()),
        pa.array("0 1 2 3 4".split(), type=pa.string()))

    for task, group, key in zip(scanner.scan(), [1, 2], ['xxx', 'yyy']):
        expected_group_column = pa.array([group] * 5, type=pa.int32())
        expected_key_column = pa.array([key] * 5, type=pa.string())
        for batch in task.execute():
            assert batch.num_columns == 5
            assert batch[0].equals(expected_i64)
            assert batch[1].equals(expected_f64)
            assert batch[2].equals(expected_str)
            assert batch[3].equals(expected_group_column)
            assert batch[4].equals(expected_key_column)

    table = dataset.to_table()
    assert isinstance(table, pa.Table)
    assert len(table) == 10
    assert table.num_columns == 5
def dataset(mockfs):
    """Build a single-source ``ds.Dataset`` over the partitioned 'subdir'
    tree of the mock filesystem.

    NOTE(review): this looks like a pytest fixture; the ``@pytest.fixture``
    decorator, if any, is outside this chunk — confirm before relying on it.
    """
    # Renamed from `format`, which shadowed the builtin.
    file_format = ds.ParquetFileFormat()
    selector = fs.FileSelector('subdir', recursive=True)

    options = ds.FileSystemFactoryOptions('subdir')
    options.partitioning = ds.DirectoryPartitioning(
        pa.schema([pa.field('group', pa.int32()),
                   pa.field('key', pa.string())]))

    factory = ds.FileSystemSourceFactory(mockfs, selector,
                                         file_format, options)
    schema = factory.inspect()
    source = factory.finish()
    return ds.Dataset([source], schema)
def test_file_system_factory(mockfs, paths_or_selector):
    """End-to-end check of ``FileSystemSourceFactory``.

    Verifies factory option defaults, schema inspection, and a full scan
    over the two-partition mock filesystem tree (4 columns: two file
    columns plus the two partition fields).
    """
    # Renamed from `format`, which shadowed the builtin.
    file_format = ds.ParquetFileFormat()

    options = ds.FileSystemFactoryOptions('subdir')
    options.partitioning = ds.DirectoryPartitioning(
        pa.schema([pa.field('group', pa.int32()),
                   pa.field('key', pa.string())]))

    assert options.partition_base_dir == 'subdir'
    assert options.ignore_prefixes == ['.', '_']
    # NOTE(review): the sibling FileSystemDatasetFactory test asserts this
    # default is False — the two factory APIs appear to be from different
    # library versions; confirm which default applies here.
    assert options.exclude_invalid_files is True

    factory = ds.FileSystemSourceFactory(mockfs, paths_or_selector,
                                         file_format, options)
    inspected_schema = factory.inspect()

    # Reuse the already-inspected schema instead of calling inspect() twice.
    assert isinstance(inspected_schema, pa.Schema)
    assert isinstance(factory.inspect_schemas(), list)
    assert isinstance(factory.finish(inspected_schema),
                      ds.FileSystemSource)
    assert factory.root_partition.equals(ds.ScalarExpression(True))

    source = factory.finish()
    assert isinstance(source, ds.Source)

    dataset = ds.Dataset([source], inspected_schema)
    scanner = dataset.new_scan().finish()
    # One scan task per partition directory.
    assert len(list(scanner.scan())) == 2

    expected_i64 = pa.array([0, 1, 2, 3, 4], type=pa.int64())
    expected_f64 = pa.array([0, 1, 2, 3, 4], type=pa.float64())
    for task, group, key in zip(scanner.scan(), [1, 2], ['xxx', 'yyy']):
        expected_group_column = pa.array([group] * 5, type=pa.int32())
        expected_key_column = pa.array([key] * 5, type=pa.string())
        for batch in task.execute():
            assert batch.num_columns == 4
            assert batch[0].equals(expected_i64)
            assert batch[1].equals(expected_f64)
            assert batch[2].equals(expected_group_column)
            assert batch[3].equals(expected_key_column)

    table = scanner.to_table()
    assert isinstance(table, pa.Table)
    assert len(table) == 10
    assert table.num_columns == 4
def test_partitioning():
    """Exercise DirectoryPartitioning and HivePartitioning path parsing."""
    schema = pa.schema([pa.field('i64', pa.int64()),
                        pa.field('f64', pa.float64())])
    # Both schemes are Partitioning subclasses.
    for klass in (ds.DirectoryPartitioning, ds.HivePartitioning):
        assert isinstance(klass(schema), ds.Partitioning)

    def equals_expr(name, value):
        # field == scalar comparison expression
        return ds.ComparisonExpression(ds.CompareOperator.Equal,
                                       ds.FieldExpression(name),
                                       ds.ScalarExpression(value))

    # Directory scheme: values are positional path segments.
    partitioning = ds.DirectoryPartitioning(
        pa.schema([pa.field('group', pa.int64()),
                   pa.field('key', pa.float64())]))
    expr = partitioning.parse('/3/3.14')
    assert isinstance(expr, ds.Expression)
    assert expr.equals(ds.AndExpression(equals_expr('group', 3),
                                        equals_expr('key', 3.14)))

    # Non-numeric segment cannot be parsed against the typed schema.
    with pytest.raises(pa.ArrowInvalid):
        partitioning.parse('/prefix/3/aaa')

    # Hive scheme: values are key=value path segments.
    partitioning = ds.HivePartitioning(
        pa.schema([pa.field('alpha', pa.int64()),
                   pa.field('beta', pa.int64())]))
    expr = partitioning.parse('/alpha=0/beta=3')
    assert expr.equals(ds.AndExpression(equals_expr('alpha', 0),
                                        equals_expr('beta', 3)))