Example no. 1
def load_objects_from_storage(metadata_filename: str,
                              class_factory: GenericObject,
                              root_directory: Path) -> dict:
    return excepting_pipe(list_files(root_directory, metadata_filename),
                          curry(map)(load_json),
                          curry(map)(class_factory.from_json),
                          curry(map)(lambda _: (_.id, _)), dict)
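
All of these examples thread their data through excepting_pipe, whose definition is not part of this listing. As a rough, hypothetical sketch (an assumption, not the project's actual implementation), it behaves like toolz.pipe but short-circuits on failure, returning the exception as a value instead of raising it; this would match how Example no. 9 below inspects the result with isinstance(result, Exception):

from typing import Any, Callable

def excepting_pipe(value: Any, *funcs: Callable) -> Any:
    # Hypothetical sketch: thread `value` through `funcs` like toolz.pipe,
    # but return the first exception as a value instead of raising it
    for func in funcs:
        if isinstance(value, Exception):
            return value
        try:
            value = func(value)
        except Exception as exc:
            return exc
    return value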
Example no. 2
def delete_version(self, dataset_id: UUID,
                   version_id: UUID) -> Union[Exception, DatasetVersion]:
    return excepting_pipe(
        version_id,
        self.get_version(ensure_uuid(dataset_id)),
        self.check_version_delete,
        # Remove the version directory
        curry(do)(lambda v: shutil.rmtree(v.storage_path)),
        self.unload_version)
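
Examples 2 and 3 slot a side effect (deleting a directory) into the pipeline with curry(do)(...): toolz.do(func, x) calls func(x) for its effect and then returns x unchanged, so the deleted object keeps flowing to the next stage. A minimal, self-contained illustration (the temporary directory is made up for the demo):

import shutil
import tempfile
from pathlib import Path

from toolz import curry, do, pipe

tmp_dir = Path(tempfile.mkdtemp())  # throwaway directory for the demo

result = pipe(
    tmp_dir,
    # Side effect: remove the directory; the path itself flows on unchanged
    curry(do)(lambda p: shutil.rmtree(p)),
    lambda p: f"deleted {p}")

print(result)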
Example no. 3
def delete_dataset(self, dataset_id: UUID) -> Union[Exception, Dataset]:
    return excepting_pipe(
        ensure_uuid(dataset_id),
        self.get_dataset,
        # Remove complete dataset directory
        curry(do)(lambda ds: shutil.rmtree(ds.storage_path)),
        # Unload dataset from data manager
        self.unload_dataset,
        # Unload all dataset versions from data manager
        self.unload_dataset_versions)
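
ensure_uuid is another helper that is not shown here. Judging by its use on identifiers that may arrive as strings (request or path parameters), it presumably normalises its argument to a uuid.UUID; a guess at its shape, offered purely as an assumption:

from uuid import UUID

def ensure_uuid(value) -> UUID:
    # Hypothetical helper: accept a UUID instance or its string form
    return value if isinstance(value, UUID) else UUID(str(value))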
Example no. 4
def list_snapshots_filtered(self, model_id: UUID, git_commit_id: str,
                            query: dict) -> Mapping[UUID, ModelSnapshot]:
    return excepting_pipe(
        self._snapshots.values(),
        curry(filter)(lambda v: v.model.id == ensure_uuid(model_id)),
        curry(filter)(lambda v: v.model_git_commit == git_commit_id),
        curry(sorted,
              key=lambda snap: getattr(snap, query['sortby']),
              reverse=query['order']),
        curry(partition_all)(query['limit'] if query['limit'] > 0 else len(
            self._snapshots.values())), list,
        curry(get, default=[])(query['offset']))
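
Examples 4 through 7 share the same pagination recipe: sort, chunk into pages with partition_all, then pick a single page with get. Note that query['offset'] selects a page index rather than an element offset, and that a non-positive limit means "everything on one page". A self-contained illustration with plain integers and a made-up query:

from toolz import curry, get, partition_all, pipe

items = list(range(1, 11))  # 1 .. 10
query = {'order': True, 'limit': 3, 'offset': 1}

page = pipe(
    items,
    curry(sorted, reverse=query['order']),    # sort descending
    curry(partition_all)(query['limit']),     # chunk into pages of 3 items
    list,                                     # materialise the pages
    curry(get, default=[])(query['offset']))  # pick page index 1

print(page)  # (7, 6, 5)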
Example no. 5
def list_results(self, query: dict) -> Iterator[Result]:
    """
    List all results
    :param query:
    :return:
    """
    return excepting_pipe(
        self._results.values(),
        curry(sorted,
              key=lambda x: getattr(x, query['sortby']),
              reverse=query['order']),
        curry(partition_all)(query['limit'] if query['limit'] > 0 else len(
            self._results.values())), list,
        curry(get, default=[])(query['offset']))
Example no. 6
def list_snapshots(self, query: dict) -> Iterator[ModelSnapshot]:
    """
    List all model snapshots
    :param query:
    :return:
    """
    # TODO: Not completely pure pipeline
    return excepting_pipe(
        self._snapshots.values(),
        curry(sorted,
              key=lambda snap: getattr(snap, query['sortby']),
              reverse=query['order']),
        curry(partition_all)(query['limit'] if query['limit'] > 0 else len(
            self._snapshots.values())), list,
        curry(get, default=[])(query['offset']))
Example no. 7
def versions(self, dataset_id: UUID,
             query: dict) -> Iterator[DatasetVersion]:
    """
    Lists all versions of a given dataset
    :param dataset_id:
    :param query:
    :return:
    """
    # TODO: Not completely pure pipeline
    return excepting_pipe(
        self._versions.values(),
        curry(filter)(lambda v: v.parent_id == ensure_uuid(dataset_id)),
        curry(sorted,
              key=lambda ds: getattr(ds, query['sortby']),
              reverse=query['order']), partition_versions(query), list,
        curry(get, default=[])(query['offset']))
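
Example no. 7 delegates the chunking step to partition_versions(query), which is also not defined in this listing. Presumably it packages the same limit logic used inline in Examples 4 to 6; a hypothetical sketch, under that assumption:

from toolz import partition_all

def partition_versions(query: dict):
    # Hypothetical: return a function that chunks a sequence into pages of
    # query['limit'] items, or a single page when no positive limit is given
    def _partition(seq):
        items = list(seq)
        limit = query['limit'] if query['limit'] > 0 else max(len(items), 1)
        return partition_all(limit, items)
    return _partition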
Example no. 8
    def add_version(self, req: Request,
                    dataset_id: str) -> Union[Exception, DatasetVersion]:
        file = process_multi_part_upload(req)

        if file.get('temp_dir', False):
            return excepting_pipe(
                file, extract_tarball_to_temp_dir,
                create_object_from_archive(
                    dataset_id, self._config.dataset_filename, DatasetVersion,
                    self._config.dataset_base_dir,
                    self._config.version_meta_attributes),
                ensure_no_metadata_files_present(
                    self._config.metadata_file_list), move_to_storage,
                DatasetManager.store_version(self,
                                             self._config.version_filename))
        else:
            # Return feedback on current progress
            return file
Example no. 9
    def on_post(self, req: Request, resp: Response):
        result = process_multi_part_upload(req)

        if isinstance(result, Exception):
            resp.body = json.dumps(dict(error=str(result)))
            resp.status = falcon.HTTP_500
        elif isinstance(result, dict):
            if result.get('temp_dir', False):
                result = excepting_pipe(result, extract_tarball_to_temp_dir)
                if isinstance(result, Exception):
                    resp.body = json.dumps(dict(error=str(result)))
                    resp.status = falcon.HTTP_500
                else:
                    resp.body = json.dumps(result, cls=DataclassJSONEncoder)
                    resp.status = falcon.HTTP_201
            else:
                resp.body = json.dumps(result)
                resp.status = falcon.HTTP_200
Example no. 10
def dataset_version_count(self, dataset_id: UUID) -> Union[Exception, int]:
    return excepting_pipe(
        self._versions.values(),
        curry(filter)(
            lambda version: version.parent_id == ensure_uuid(dataset_id)),
        list, len)
Example no. 11
def list_dataset_versions(self, dataset: Dataset) -> Iterator[str]:
    return excepting_pipe(
        self._versions.values(),
        curry(filter)(lambda v: v.parent_id == dataset.id),
        curry(map)(lambda ds: ds.id))