def trash(resource):
    """Move resource `resource` to trash bin

    Arguments:
        resource (Resource): Resource to move to trash

    Returns:
        None

    """

    trash_path = resource.path.parent + lib.TRASH

    if lib.Path.CONTAINER not in trash_path.as_str:
        # Open Metadata only bothers with .meta subfolders.
        # In cases where we trash the .meta folder, we'll
        # have to store it underneath an additional .meta
        trash_path = trash_path + lib.Path.CONTAINER

    # Ensure resource.path.name is unique in trash_path, as per RFC14
    if os.path.exists(trash_path.as_str):
        for match in util.find_all(trash_path.as_str, resource.path.name):
            match_path = trash_path + match
            _remove(match_path.as_str)

            # NOTE: fixed typo in log message ("exisisting" -> "existing")
            log.info("remove(): Removing existing "
                     "%r from trash" % match_path.name)

    basename = resource.path.basename
    log.info("Trashing basename: %s" % basename)
    log.info("Fullname: %s" % resource.path.as_str)

    deleted_path = trash_path + basename

    # Duplicates were removed above, so the target slot must be free.
    assert not os.path.exists(deleted_path.as_str), deleted_path

    _move(resource.path.as_str, deleted_path.as_str)

    log.info("remove(): Successfully removed %r" % resource.path.as_str)
def trash(node):
    """Move `node` to the trash bin via the `service` backend

    Arguments:
        node (Node): Node to move to trash

    Returns:
        None

    """

    trash_path = node.path.parent + lib.TRASH

    # Open Metadata only bothers with .meta subfolders; when trashing
    # the .meta folder itself, store it under an additional .meta.
    if lib.Path.CONTAINER not in trash_path.as_str:
        trash_path = trash_path + lib.Path.CONTAINER

    # Ensure node.path.name is unique in trash_path, as per RFC14
    if service.exists(trash_path.as_str):
        for match in util.find_all(trash_path.as_str, node.path.name):
            match_path = trash_path + match
            service.remove(match_path.as_str)

            # NOTE: fixed typo in log message ("exisisting" -> "existing")
            log.info("remove(): Removing existing "
                     "%r from trash" % match_path.name)

    basename = node.path.basename
    log.info("Trashing basename: %s" % basename)
    log.info("Fullname: %s" % node.path.as_str)

    deleted_path = trash_path + basename

    # Duplicates were removed above, so the target slot must be free.
    assert not service.exists(deleted_path.as_str), deleted_path

    service.move(node.path.as_str, deleted_path.as_str)

    log.info("remove(): Successfully removed %r" % node.path.as_str)
def pull(resource, lazy=False, depth=1, merge=False, _currentlevel=1):
    """Physically retrieve value from datastore.

    Arguments:
        resource (Resource): Resource to pull from disk
        lazy (bool): Only pull if no existing value already exists
        depth (int): Pull `resource` and `depth` levels of children
        merge (bool): Combine results with existing value of `resource`
        _currentlevel (int): Internal recursion counter; do not pass

    Raises:
        error.Exists: If `resource` does not exist on disk
        error.Duplicate: If multiple entries match `resource` by name

    Returns:
        Resource: The originally passed resource, to facilitate
            for chaining commands.

    """

    path = resource.path
    if not os.path.exists(path.as_str):
        # If the name of `resource` has been entered manually, chances
        # are that there is an existing resource on disk under a
        # different suffix. If so, find a matching name, under any
        # suffix, and assume this is the one the user intended to
        # pull from.
        similars = list()
        for similar in util.find_all(path.parent.as_str, path.name):
            similars.append(similar)

        if len(similars) > 1:
            raise error.Duplicate("Duplicate entries found "
                                  "@ {}".format(path))

        try:
            resource._path = path.copy(path=similars[0])
        except IndexError:
            raise error.Exists("{} does not exist".format(path))
        else:
            # Retry with the resolved path.
            return pull(resource,
                        lazy=lazy,
                        depth=depth,
                        merge=merge,
                        _currentlevel=_currentlevel)

    if lazy and resource.has_value:
        return resource

    if not merge:
        resource.clear()

    path = path.as_str
    if os.path.isdir(path):
        # Pull only the immediate children; `break` stops os.walk
        # after the first (top) level.
        for _, dirs, files in os.walk(path):
            for entry in dirs:
                Entry(entry, parent=resource)
            for entry in files:
                Entry(entry, parent=resource)
            break
    else:
        try:
            with open(path, 'r') as f:
                value = f.read()
        except IOError as e:
            if e.errno == errno.ENOENT:
                raise error.Exists(path)
            elif e.errno == errno.EACCES:
                raise error.Exists("Make sure this is a file "
                                   "and that you have the appropriate "
                                   "permissions: {}".format(path))
            else:
                # BUGFIX: previously any other IOError was silently
                # swallowed, leaving `value` unbound and triggering a
                # NameError below. Re-raise instead.
                raise

        # Empty files return an empty string
        if value != "":
            resource.load(value)

    # Continue pulling children until `depth` is reached
    if _currentlevel < depth:
        if resource.type in ('dict', 'list'):
            for child in resource:
                pull(child,
                     lazy=lazy,
                     depth=depth,
                     merge=merge,
                     _currentlevel=_currentlevel + 1)

    resource.isdirty = False
    return resource