def delete_dataset(dataset_uuid=None, store=None, factory=None):
    """
    Delete a dataset from the store, including indices, partition files,
    metadata and any unreferenced (garbage) files.

    Deletion order matters for integrity: auxiliary data (garbage files,
    indices) is removed first, then the partition payloads, and the
    top-level metadata file is deleted last so that a partially deleted
    dataset is still detectable as existing.

    Parameters
    ----------
    dataset_uuid : str, optional
        UUID of the dataset to delete. May be omitted if ``factory``
        already identifies the dataset.
    store : callable or store object, optional
        Store (or factory returning one) holding the dataset. Passed
        through ``_make_callable`` for normalization.
    factory : DatasetFactory, optional
        Pre-built dataset factory; if given, ``dataset_uuid``/``store``
        are resolved through it.
    """
    ds_factory = _ensure_factory(
        dataset_uuid=dataset_uuid,
        load_schema=False,
        store=_make_callable(store),
        factory=factory,
        load_dataset_metadata=False,
    )

    # Remove possibly unreferenced files
    garbage_collect_dataset(factory=ds_factory)

    # Delete indices first since they do not affect dataset integrity
    delete_indices(dataset_factory=ds_factory)

    for metapartition in dispatch_metapartitions_from_factory(ds_factory):
        metapartition = cast(MetaPartition, metapartition)
        metapartition.delete_from_store(dataset_uuid=dataset_uuid, store=store)

    # delete common metadata after partitions
    delete_common_metadata(dataset_factory=ds_factory)

    # Delete the top level metadata file last: its presence is what marks
    # the dataset as existing.
    delete_top_level_metadata(dataset_factory=ds_factory)
def _delete_tl_metadata(dataset_factory, *_ignored):
    """
    Delete the dataset's top-level metadata file.

    Acts as a collector for delayed objects: any extra positional
    arguments (results of upstream delayed tasks) are accepted and
    discarded so this can be scheduled after them.
    """
    delete_top_level_metadata(dataset_factory=dataset_factory)