def update_pkg(
    df: pd.DataFrame,
    user: str,
    package: str,
    readme: Optional[str] = None,
    hash_key=None,
):
    r"""
    Build and push a Quilt data package from a dataframe, with an optional README.

    Parameters
    ----------
    df : pd.DataFrame
        Data stored as the package's ``df`` node.
    user : str
        Quilt user (namespace) the package is published under.
    package : str
        Package name within the user namespace.
    readme : str, optional
        README text; when provided, it is written to a temporary file
        and built into the package as the ``README`` node.
    hash_key
        Forwarded as ``hash`` to ``quilt.push`` — presumably selects the
        package revision to push; confirm against the quilt API.

    Returns
    -------
    None
    """
    pkg_path = f'{user}/{package}'
    quilt.build(pkg_path, quilt.nodes.GroupNode(dict(author='@hudlrd')))
    quilt.build(f'{pkg_path}/df', quilt.nodes.DataNode(None, None, df, {}))

    # TODO: warn the user if readme is not provided
    if readme is not None:
        with NamedTemporaryFile() as tmp:
            tmp.write(readme.encode('UTF-8'))
            tmp.flush()  # ensure bytes hit disk before quilt reads the file
            # BUG FIX: NamedTemporaryFile exposes its filesystem path as
            # `.name`; the original `.model_name` raised AttributeError.
            quilt.build(f'{pkg_path}/README', tmp.name)

    quilt.login()
    quilt.push(pkg_path, is_public=True, hash=hash_key)
# %%
os.makedirs(name="model/train", exist_ok=True)

# Map of array name -> in-memory array, used both for the local .npy dumps
# and for the quilt package nodes below.
training_arrays = {
    "W1_data": rema,
    "W2_data": measuresvelocity,
    "W3_data": accumulation,
    "X_data": lores,
    "Y_data": hires,
}

# Persist each training array to disk under model/train/
for array_name, array in training_arrays.items():
    np.save(file=f"model/train/{array_name}.npy", arr=array)

# %% [markdown]
# ### Quilt
#
# Login -> Build -> Push

# %%
quilt.login()

# %%
# Tiled datasets for training neural network
for array_name, array in training_arrays.items():
    quilt.build(package=f"weiji14/deepbedmap/model/train/{array_name}", path=array)

# %%
# Original datasets for neural network predictions on bigger area
quilt.build(
    package="weiji14/deepbedmap/lowres/bedmap2_bed",
    path="lowres/bedmap2_bed.tif",
)