    def test_store_zip_parquet_c(self) -> None:

        f1, f2 = get_test_framesB()

        config = StoreConfig(
            index_depth=1,
            include_index=True,
            index_constructors=IndexDate,
            columns_depth=1,
            include_columns=True,
        )

        with temp_file('.zip') as fp:
            st = StoreZipParquet(fp)
            st.write(((f.name, f) for f in (f1, f2)), config=config)

            post = tuple(
                st.read_many(
                    ('a', 'b'),
                    container_type=Frame,
                    config=config,
                ))

            self.assertIs(post[0].index.__class__, IndexDate)
            self.assertIs(post[1].index.__class__, IndexDate)
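
For context, a minimal sketch of what the two frames returned by get_test_framesB() might look like for this round trip to succeed; the real helper lives in the test utilities and may differ, so the names, index values, and columns below are assumptions:

from static_frame import Frame

# Hypothetical stand-ins: frames named 'a' and 'b' whose index labels are
# date strings, so that IndexDate can reconstruct the index on read.
f1 = Frame.from_dict(
        dict(x=(1, 2), y=(3, 4)),
        index=('2021-01-01', '2021-01-02'),
        name='a')
f2 = Frame.from_dict(
        dict(x=(5, 6), y=(7, 8)),
        index=('2021-02-01', '2021-02-02'),
        name='b')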
Example 2
    def to_zip_parquet(self,
                       fp: PathSpecifier,
                       config: StoreConfigMapInitializer = None) -> None:
        '''
        Write the complete :obj:`Bus` as a zipped archive of parquet files.

        {args}
        '''
        store = StoreZipParquet(fp)
        config = config if config is not None else self._config
        store.write(self.items(), config=config)
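
A usage sketch for this method, assuming the frames f1 and f2 from the sketch above and a hypothetical output path; Bus.from_frames and StoreConfig are part of static_frame:

from static_frame import Bus, StoreConfig

config = StoreConfig(include_index=True, index_depth=1, columns_depth=1)
bus = Bus.from_frames((f1, f2))
bus.to_zip_parquet('/tmp/frames.zip', config=config)  # hypothetical path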
Example 3
    def to_zip_parquet(self,
                       fp: PathSpecifier,
                       *,
                       config: StoreConfigMapInitializer = None) -> None:
        '''
        Write the complete :obj:`Bus` as a zipped archive of parquet files.

        {args}
        '''
        store = StoreZipParquet(fp)
        config = self._filter_config(config)
        store.write(self._items_store(), config=config)
Example 4
    def test_store_zip_parquet_b(self) -> None:

        f1, f2, f3 = get_test_framesA()

        with temp_file('.zip') as fp:
            for read_max_workers in (1, 2):
                config = StoreConfig(index_depth=1,
                                     include_index=True,
                                     columns_depth=1,
                                     read_max_workers=read_max_workers)
                st = StoreZipParquet(fp)
                st.write((f.name, f) for f in (f1, f2, f3))

                post = tuple(st.read_many(('baz', 'bar', 'foo'), config=config))
                self.assertEqual(len(post), 3)
                self.assertEqual(post[0].name, 'baz')
                self.assertEqual(post[1].name, 'bar')
                self.assertEqual(post[2].name, 'foo')
Example 5
    def test_store_zip_parquet_a(self) -> None:

        f1, f2, f3 = get_test_framesA()

        with temp_file('.zip') as fp:
            for read_max_workers in (1, 2):
                config = StoreConfig(index_depth=1,
                                     include_index=True,
                                     columns_depth=1,
                                     read_max_workers=read_max_workers)

                st = StoreZipParquet(fp)
                st.write((f.name, f) for f in (f1, f2, f3))

                f1_post = st.read('foo', config=config)
                self.assertTrue(
                    f1.equals(f1_post, compare_name=True, compare_class=True))

                f2_post = st.read('bar', config=config)
                self.assertTrue(
                    f2.equals(f2_post, compare_name=True, compare_class=True))

                f3_post = st.read('baz', config=config)
                self.assertTrue(
                    f3.equals(f3_post, compare_name=True, compare_class=True))
Example 6
    def from_zip_parquet(
            cls,
            fp: PathSpecifier,
            config: StoreConfigMapInitializer = None) -> 'StoreClientMixin':
        '''
        Given a file path to zipped parquet :obj:`Bus` store, return a :obj:`Bus` instance.

        {args}
        '''
        store = StoreZipParquet(fp)
        return cls._from_store(store, config)  #type: ignore
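
A round-trip sketch for this classmethod, assuming the archive written in the to_zip_parquet sketch above; the path and the label 'a' are hypothetical:

bus = Bus.from_zip_parquet('/tmp/frames.zip', config=config)
f1_loaded = bus['a']  # the Frame is read from the parquet archive on access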
Example 7
    def from_zip_parquet(
        cls,
        fp: PathSpecifier,
        config: StoreConfigMapInitializer = None,
        max_persist: tp.Optional[int] = None,
    ) -> 'StoreClientMixin':
        '''
        Given a file path to zipped parquet :obj:`Bus` store, return a :obj:`Bus` instance.

        {args}
        '''
        store = StoreZipParquet(fp)
        return cls._from_store(
            store,  #type: ignore
            config=config,
            max_persist=max_persist,
        )
Example 8
    def from_zip_parquet(cls,
            fp: PathSpecifier,
            *,
            config: StoreConfigMapInitializer = None,
            max_persist: tp.Optional[int] = None,
            index_constructor: IndexConstructor = None,
            ) -> 'Bus':
        '''
        Given a file path to zipped parquet :obj:`Bus` store, return a :obj:`Bus` instance.

        {args}
        '''
        store = StoreZipParquet(fp)
        return cls._from_store(store,
                config=config,
                max_persist=max_persist,
                index_constructor=index_constructor,
                )
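
This variant adds index_constructor, which in this signature is used to construct the Bus's own index of labels rather than the indices of the stored Frames. A sketch under the assumption that the archive's labels are date strings:

from static_frame import Bus, IndexDate

bus = Bus.from_zip_parquet(
        '/tmp/frames_by_date.zip',  # hypothetical archive keyed by date-string labels
        config=config,
        index_constructor=IndexDate,
        )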
Example 9
    def from_zip_parquet(cls,
            fp: PathSpecifier,
            *,
            config: StoreConfigMapInitializer = None,
            axis: int = 0,
            retain_labels: bool,
            deepcopy_from_bus: bool = False,
            max_persist: tp.Optional[int] = None,
            ) -> 'Quilt':
        '''
        Given a file path to zipped parquet :obj:`Quilt` store, return a :obj:`Quilt` instance.

        {args}
        '''
        store = StoreZipParquet(fp)
        return cls._from_store(store,
                config=config,
                axis=axis,
                retain_labels=retain_labels,
                deepcopy_from_bus=deepcopy_from_bus,
                max_persist=max_persist,
                )
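
A usage sketch for the Quilt variant, assuming the same hypothetical archive; retain_labels=True keeps each stored Frame's label as the outer level of the resulting hierarchical index:

from static_frame import Quilt

quilt = Quilt.from_zip_parquet(
        '/tmp/frames.zip',  # hypothetical path
        config=config,
        retain_labels=True,
        )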
Example 10
    def from_zip_parquet(
        cls,
        fp: PathSpecifier,
        *,
        config: StoreConfigMapInitializer = None,
        max_workers: tp.Optional[int] = None,
        chunksize: int = 1,
        use_threads: bool = False,
    ) -> 'Batch':
        '''
        Given a file path to zipped parquet :obj:`Batch` store, return a :obj:`Batch` instance.

        {args}
        '''
        store = StoreZipParquet(fp)
        return cls._from_store(
            store,
            config=config,
            max_workers=max_workers,
            chunksize=chunksize,
            use_threads=use_threads,
        )
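
A usage sketch for the Batch variant, assuming the same hypothetical archive; operations on a Batch are deferred until a realizing call such as to_frame():

from static_frame import Batch

batch = Batch.from_zip_parquet('/tmp/frames.zip', config=config, max_workers=2)
result = batch.sum().to_frame()  # sums each stored Frame, then collects the results into one Frame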