Example No. 1
import csv, os
from pathlib import Path
from typing import Any, Iterable, Optional, Union

def save(table: Iterable[Iterable[str]], fname: Union[str, Path], *, mode: str = 'w',
         heads: Optional[Iterable[str]] = None, **kwargs: Any) -> bool:
    # Write rows of strings as CSV; checkOutputFile, available and pickmap are host-library helpers.
    checkOutputFile(fname)
    with open(fname, mode) as f:
        if available(heads):
            # append '\n' to header lines that lack it, then write them verbatim
            f.writelines(pickmap(heads, lambda x: x[-1] != '\n', lambda x: x + '\n'))
        csv.writer(f, **kwargs).writerows(table)
    return os.path.isfile(fname)
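A minimal usage sketch for this example, assuming save and its host-library helpers (checkOutputFile, available, pickmap) are importable; the file name, rows and header line below are illustrative:

rows = [['id', 'value'], ['1', '3.14'], ['2', '2.72']]
save(rows, 'table.csv', heads=['# generated table'], delimiter=',')   # extra kwargs go to csv.writer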
Example No. 2
def savelns(lines: Iterable[str],
            fname: Union[str, Path],
            mode: str = 'w',
            newline: bool = True) -> bool:
    # Write raw text lines to a file; when newline is True, append '\n' to lines that lack it.
    checkOutputFile(fname)
    if newline:
        lines = pickmap(lines, lambda x: x[-1] != '\n', lambda x: x + '\n')
    with open(fname, mode) as ofile:
        ofile.writelines(lines)
    return os.path.isfile(fname)
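A usage sketch, assuming savelns is imported from the same module; the file name is illustrative:

savelns(['first line', 'second line\n'], 'notes.txt')   # '\n' is appended only where missing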
Example No. 3
    def offload(self, fname: Union[str, Path]) -> Table:
        # Move the data matrix into an on-disk numpy memmap so it no longer occupies RAM.
        if isinstance(self._dmatx, np.memmap):
            logging.warning('Table already offloaded, skip')
            return self

        checkOutputFile(fname)
        mdmatx = np.memmap(fname, dtype=self.dtype, mode='w+', shape=self.shape)
        mdmatx[:] = self._dmatx
        self._dmatx = mdmatx

        self._memmap = Metadata(file=Path(fname), dtype=self.dtype, shape=self.shape)
        return self
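A hedged sketch of a call, assuming tbl is an instance of this library's Table class; the file name is illustrative:

tbl.offload('matrix.mmap')   # the data matrix is now backed by the on-disk memmap
tbl.offload('matrix.mmap')   # a second call only logs 'Table already offloaded, skip' and returns self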
Example No. 4
    def savehdf(self, fname: Union[str, Path], compression: int = 0) -> bool:
        # Serialise the table to HDF5 via PyTables (ptb); compression is the Filters complevel (0-9).
        checkOutputFile(fname)
        hdf = ptb.open_file(fname, mode='w', filters=ptb.Filters(compression))

        darr = hdf.create_array(hdf.root, 'DataMatx', self._dmatx)
        for k, v in self._metas.items():
            setattr(darr.attrs, k, v)   # store table metadata as attributes on the data array

        if available(self._rnames): hdf.create_array(hdf.root, 'RowNames', np.array(self._rnames))
        if available(self._cnames): hdf.create_array(hdf.root, 'ColNames', np.array(self._cnames))
        if available(self._rindex): self._rindex.tohtable(hdf.root, 'RowIndex')
        if available(self._cindex): self._cindex.tohtable(hdf.root, 'ColIndex')

        hdf.close()
        return os.path.isfile(fname)
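A hedged usage sketch, assuming tbl is a Table instance from this library; compression is forwarded to ptb.Filters as the complevel (0 = none, 9 = maximum):

ok = tbl.savehdf('table.h5', compression=5)   # writes DataMatx plus any available names/indices
assert ok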
Example No. 5
    def saverdata(self, fname: Union[str, Path], *, dataobj: str = 'data.matrix',
                  ridxobj: Optional[str] = 'row.index', cidxobj: Optional[str] = 'col.index',
                  transpose: bool = True) -> bool:
        # Export the table to an R .rdata file through the library's R wrapper (rw).
        if missing(rw): raise RuntimeError('RWrapper not available for this installation')
        checkOutputFile(fname)

        # optionally transpose, swapping row / column names and indices accordingly
        dm, rn, cn, ri, ci = (self._dmatx,   self._rnames, self._cnames, self._rindex, self._cindex) if not transpose else \
                             (self._dmatx.T, self._cnames, self._rnames, self._cindex, self._rindex)

        dmtx = rw.asMatrix(dm, rownames=rn, colnames=cn)
        rw.assign(dmtx, dataobj)

        if available(ri): rw.assign(rw.r['data.frame'](**{k: rw.asVector(v) for k, v in ri.fields}), ridxobj)
        if available(ci): rw.assign(rw.r['data.frame'](**{k: rw.asVector(v) for k, v in ci.fields}), cidxobj)

        vnames = [dataobj] + ([ridxobj] if available(ri) else []) + ([cidxobj] if available(ci) else [])
        rw.run(f'save({paste(vnames, sep = ",")}, file = "{fname}")') # avoid bug in rw.save
        return os.path.isfile(fname)
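A hedged sketch, assuming tbl is a Table instance and the R wrapper (rw) is installed; with the default object names above, load('table.rdata') in R then restores data.matrix, plus row.index / col.index when the corresponding indices are present:

tbl.saverdata('table.rdata', transpose=False)   # keep rows as rows instead of transposing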
Example No. 6
def save(txt: str, fname: Union[str, Path], mode: str = 'w') -> bool:
    # Write a single text string to a file and report whether the file now exists.
    checkOutputFile(fname)
    with open(fname, mode) as f:
        f.write(txt)
    return os.path.isfile(fname)
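A usage sketch for this text variant of save; the file name and content are illustrative:

save('hello world\n', 'note.txt')               # overwrite (default mode 'w')
save('appended line\n', 'note.txt', mode='a')   # append to the same file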
Example No. 7
    def savehdf(self, fname: Union[str, Path], compression: int = 0) -> bool:
        # Serialise this StructuredArray to HDF5 (tb is PyTables) under the node name 'StructuredArray'.
        checkOutputFile(fname)
        with tb.open_file(fname, mode='w',
                          filters=tb.Filters(compression)) as hdf:
            self.tohtable(hdf.root, 'StructuredArray')
        return os.path.isfile(fname)
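A hedged sketch, assuming arr is a StructuredArray instance from this library:

arr.savehdf('arrays.h5', compression=9)   # stored under the HDF5 node 'StructuredArray'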