def _rename(context, old_path, new_path):
    if (not (yield _exists(context, old_path))):
        raise HTTPServerError(400, "Source does not exist")

    if (yield _exists(context, new_path)):
        raise HTTPServerError(400, "Target already exists")

    type = yield _type_from_path(context, old_path)
    old_key = _key(context, old_path)
    new_key = _key(context, new_path)

    def replace_key_prefix(string):
        return new_key + string[len(old_key):]

    object_key = \
        [] if type == 'directory' else \
        [(old_key, new_key)]

    # Descendant renames only: the top-level object itself is handled via object_key
    # in the loops below, so it isn't copied or deleted twice.
    renames = [
        (key, replace_key_prefix(key))
        for (key, _) in (yield _list_all_descendant_keys(context, old_key + '/'))
    ]

    # We can't really do a transaction on S3, and not sure if we can trust that on any error
    # from DELETE, that the DELETE hasn't happened: even checking if the file is still there
    # isn't bulletproof due to eventual consistency. So we risk duplicate files over risking
    # deleted files
    for (old_key, new_key) in object_key + sorted(renames, key=lambda k: _copy_sort_key(k[0])):
        yield _copy_key(context, old_key, new_key)

    for (old_key, _) in sorted(renames, key=lambda k: _delete_sort_key(k[0])) + object_key:
        yield _delete_key(context, old_key)

    return (yield _get(context, new_path, content=False, type=None, format=None))
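# _copy_sort_key and _delete_sort_key are referenced above but not defined in this
# section. A minimal sketch of what they could look like, assuming the intent is to
# copy shallower keys before deeper ones and to delete deeper keys before shallower
# ones (children before their parent prefixes), mirroring the descendants-then-object
# ordering used in _delete further down. Both the ordering rule and these bodies are
# assumptions, not the confirmed implementation.
def _copy_sort_key(key):
    # Hypothetical: fewer '/' separators first, so parent prefixes are copied
    # before their descendants.
    return (key.count('/'), key)


def _delete_sort_key(key):
    # Hypothetical: more '/' separators first, so descendants are deleted before
    # their parent prefixes.
    return (-key.count('/'), key)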
def get_notebook_checkpoint(self, checkpoint_id, path):
    log_prefix = 'CasperCheckpoints.get_notebook_checkpoint'
    self.log.info('%s: enter, checkpoint_id=%r, path=%r', log_prefix, checkpoint_id, path)

    # normalize path
    path = '/' + path.strip('/')
    voci_path = path[1:]

    # Only a single checkpoint per path is supported; any other id is treated as missing.
    if checkpoint_id != CHECKPOINT_ID:
        raise HTTPServerError(
            404, 'Checkpoint does not exist: {}@{}'.format(path, checkpoint_id))

    virtual_casper = VirtualCasperMgr.get()
    try:
        r = virtual_casper.get_object(voci_path + FileExtension.CHECKPOINT.value)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            raise HTTPServerError(
                404, 'Checkpoint does not exist: {}@{}'.format(
                    path, checkpoint_id))
        raise

    # Parse the stored checkpoint as a version-4 notebook.
    with io.StringIO(r.data.text) as f:
        content = nbformat.read(f, 4)

    self.log.info('%s: exit', log_prefix)
    return {
        'type': FileType.NOTEBOOK.value,
        'content': content,
    }
def _rename_notebook(config, old_path, new_path):
    if (not (yield _check_if_exists(config, old_path))):
        raise HTTPServerError(400, "Source does not exist")

    if (yield _check_if_exists(config, new_path)):
        raise HTTPServerError(400, "Target already exists")

    type = yield _get_type(config, old_path)
    old_key = _get_key(config, old_path)
    new_key = _get_key(config, new_path)

    def replace_key_prefix(string):
        return new_key + string[len(old_key):]

    object_key = \
        [] if type == 'directory' else \
        [(old_key, new_key)]

    # Descendant renames only: the top-level object itself is handled via object_key
    # in the loops below, so it isn't copied or deleted twice.
    renames = [
        (key, replace_key_prefix(key))
        for (key, _) in (yield _list_all_successor_keys(config, old_key + '/'))
    ]

    for (old_key, new_key) in object_key + sorted(renames, key=lambda k: _get_copy_order_key(k[0])):
        yield _copy_key_object(config, old_key, new_key)

    for (old_key, _) in sorted(renames, key=lambda k: _delete_order_key(k[0])) + object_key:
        yield _delete_key_object(config, old_key)

    return (yield _get_model(config, new_path, content=False, type=None, format=None))
def _copy(context, from_path, to_path):
    model = yield _get(context, from_path, content=False, type=None, format=None)
    if model['type'] == 'directory':
        raise HTTPServerError(400, "Can't copy directories")

    from_dir, from_name = \
        from_path.rsplit('/', 1) if '/' in from_path else \
        ('', from_path)

    to_path = \
        to_path if to_path is not None else \
        from_dir

    if (yield _dir_exists(context, to_path)):
        copy_pat = re.compile(r'\-Copy\d*\.')
        name = copy_pat.sub(u'.', from_name)
        to_name = yield _increment_filename(context, name, to_path, insert='-Copy')
        to_path = u'{0}/{1}'.format(to_path, to_name)

    from_key = _key(context, from_path)
    to_key = _key(context, to_path)
    yield _copy_key(context, from_key, to_key)

    return {
        **model,
        'name': to_name,
        'path': to_path,
    }
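# For illustration only (hypothetical filenames): copy_pat strips any existing
# "-CopyN" suffix so that repeated copies do not accumulate suffixes, and
# _increment_filename is then expected to append a fresh, non-colliding "-CopyN":
#   re.compile(r'\-Copy\d*\.').sub(u'.', 'Report-Copy2.ipynb')  # -> 'Report.ipynb'
#   re.compile(r'\-Copy\d*\.').sub(u'.', 'Report.ipynb')        # -> 'Report.ipynb' (unchanged)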
def _new_untitled(context, path, type, ext):
    if not (yield _dir_exists(context, path)):
        raise HTTPServerError(404, 'No such directory: %s' % path)

    model_type = \
        type if type else \
        'notebook' if ext == '.ipynb' else \
        'file'

    untitled = \
        UNTITLED_DIRECTORY if model_type == 'directory' else \
        UNTITLED_NOTEBOOK if model_type == 'notebook' else \
        UNTITLED_FILE
    insert = \
        ' ' if model_type == 'directory' else \
        ''
    ext = \
        '.ipynb' if model_type == 'notebook' else \
        ext

    name = yield _increment_filename(context, untitled + ext, path, insert=insert)
    path = u'{0}/{1}'.format(path, name)

    model = {
        'type': model_type,
    }
    return (yield _new(context, model, path))
def _new_untitled_notebook(config, path, type, ext):
    if not (yield _check_directory_exists(config, path)):
        raise HTTPServerError(404, 'No such directory: %s' % path)

    model_type = \
        type if type else \
        'notebook' if ext == '.ipynb' else \
        'file'

    untitled = \
        UNTITLED_FOLDER if model_type == 'directory' else \
        UNTITLED_NB if model_type == 'notebook' else \
        UNTITLED_FILE
    insert = \
        ' ' if model_type == 'directory' else \
        ''
    ext = \
        '.ipynb' if model_type == 'notebook' else \
        ext

    name = yield _get_next_filename(config, untitled + ext, path, insert=insert)
    path = u'{0}/{1}'.format(path, name)

    model = {
        'type': model_type,
    }
    return (yield _get_new_notebook(config, model, path))
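# Assuming _increment_filename / _get_next_filename mirror the stock Jupyter
# ContentsManager.increment_filename behaviour, the generated names would look like
# (illustrative only): 'Untitled.ipynb', 'Untitled1.ipynb', ... for notebooks, and
# 'Untitled Folder', 'Untitled Folder 1', ... for directories, with insert=' '
# supplying the space before the counter.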
def key_exists():
    key = _key(context, path)
    try:
        response = yield _make_s3_request(context, 'HEAD', '/' + key, {}, {}, b'')
    except HTTPClientError as exception:
        if exception.response.code != 404:
            raise HTTPServerError(exception.response.code, 'Error checking if S3 exists')
        response = exception.response

    return response.code == 200
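# key_exists is a nested coroutine: it takes no arguments and closes over `context`
# and `path` from the enclosing scope, so the enclosing coroutine would presumably
# call it as, e.g.:
#   exists = yield key_exists()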
def _check_if_key_exists():
    key = _get_key(config, path)
    try:
        response = yield _head_object(config, config.bucket_name, key)
    except HTTPClientError as exception:
        if exception.response.code != 404:
            raise HTTPServerError(exception.response.code, 'Error checking if S3 exists')
        response = exception.response

    # Return a boolean rather than the raw response: a 404 response object would
    # otherwise be truthy and read as "exists" by callers.
    return response.code == 200
def get_file_checkpoint(self, checkpoint_id, path):
    log_prefix = 'CasperCheckpoints.get_file_checkpoint'
    self.log.info('%s: enter, checkpoint_id=%r, path=%r', log_prefix, checkpoint_id, path)

    # normalize path
    path = '/' + path.strip('/')
    voci_path = path[1:]

    # Only a single checkpoint per path is supported; any other id is treated as missing.
    if checkpoint_id != CHECKPOINT_ID:
        raise HTTPServerError(
            404, 'Checkpoint does not exist: {}@{}'.format(path, checkpoint_id))

    virtual_casper = VirtualCasperMgr.get()
    try:
        r = virtual_casper.get_object(voci_path + FileExtension.CHECKPOINT.value)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            raise HTTPServerError(
                404, 'Checkpoint does not exist: {}@{}'.format(
                    path, checkpoint_id))
        raise

    # The first byte of the stored checkpoint records the file format:
    # 0 = utf-8 text, 1 = raw bytes (returned to the caller as base64).
    if r.data.content[0] == 0:
        content = r.data.content[1:].decode('utf-8')
        format = FileFormat.TEXT.value
    elif r.data.content[0] == 1:
        content = base64.b64encode(r.data.content[1:]).decode('utf-8')
        format = FileFormat.BASE64.value
    else:
        raise Exception("Checkpoint corrupted: {}".format(r.data.content))

    self.log.info('%s: exit', log_prefix)
    return {
        'type': FileType.FILE.value,
        'content': content,
        'format': format
    }
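# The read path above implies that a stored file checkpoint is the file body prefixed
# with a single format byte (0 = utf-8 text, 1 = raw bytes exposed as base64). A
# minimal sketch of the encoding counterpart; the helper name _encode_file_checkpoint
# is hypothetical and not part of the original code.
def _encode_file_checkpoint(content, format):
    # Prepend the format flag byte that get_file_checkpoint expects.
    if format == FileFormat.TEXT.value:
        return b'\x00' + content.encode('utf-8')
    if format == FileFormat.BASE64.value:
        return b'\x01' + base64.b64decode(content)
    raise ValueError('Unknown format: {}'.format(format))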
def _delete(context, path):
    if not path:
        raise HTTPServerError(400, "Can't delete root")

    type = yield _type_from_path(context, path)
    root_key = _key(context, path)

    object_key = \
        [] if type == 'directory' else \
        [root_key]

    descendant_keys = [
        key
        for (key, _) in (yield _list_all_descendant_keys(context, root_key + '/'))
    ]

    # Delete descendants first, then the object itself.
    for key in sorted(descendant_keys, key=_delete_sort_key) + object_key:
        yield _delete_key(context, key)
def _delete_notebook(config, path):
    if not path:
        raise HTTPServerError(400, "Can't delete root")

    type = yield _get_type(config, path)
    root_key = _get_key(config, path)

    object_key = \
        [] if type == 'directory' else \
        [root_key]

    descendant_keys = [
        key
        for (key, _) in (yield _list_all_successor_keys(config, root_key + '/'))
    ]

    # Delete descendants first, then the object itself.
    for key in sorted(descendant_keys, key=_delete_order_key) + object_key:
        yield _delete_key_object(config, key)