def setAttributesFromNew(self, idObject, objDict, uid, gid, context):
    """Build an ObjectProperties from *objDict* and store it as the
    PROPERTY_OBJECT metadata of *idObject*.

    Every key/value pair of *objDict* becomes an attribute of the new
    ObjectProperties instance.

    NOTE(review): uid, gid and context are currently unused; they are
    kept for interface compatibility with the other metadata methods.
    """
    obj = ObjectProperties()
    # setattr() is the idiomatic form of obj.__setattr__(...); iterating
    # items() also avoids a second dict lookup per key.
    for key, value in objDict.items():
        setattr(obj, key, value)
    self.metaObj.setProperty(idObject, OF.PROPERTY_OBJECT, obj)
def getObjectProperties(self, bucket_name, object_name):
    """Load the stored properties of an object.

    Reads the object's ``object.xml`` metadata file, parses it, and
    rebuilds an ObjectProperties instance from the ``Global`` section.
    Returns None when the metadata file does not exist.
    """
    xml_path = os.path.join(
        self._getObjectPath(bucket_name, object_name), "object.xml")
    if not os.path.exists(xml_path):
        return None
    parsed = XmlToDict(self._readFile(xml_path)).getDict()
    properties = ObjectProperties()
    properties.setByDict(parsed['Global'])
    return properties
def createBucket(self, bucket_name, acp, location, uid, gid, context):
    """Create the metadata records for a new bucket.

    Builds the bucket's BucketProperties and a root ObjectProperties
    (type TYPE_ROOT, path '/'), then registers both with the metadata
    back-ends.
    """
    # FIXME Check Permission !!!(one kind?)
    bucket_props = BucketProperties(uid, gid, bucket_name, location)
    root_props = ObjectProperties(
        bucket_name + ".ROOT", bucket_name, '/', uid, gid,
        object_type=OF.TYPE_ROOT)
    self._LOGGER.info("Create bucket metadatas")
    self.metaBuck.create(bucket_name, bucket_props, location)
    # Register the bucket's root object as well.
    self.metaObj.create(root_props.id, root_props, acp)
def createObject(self, bucket_name, path, object_type, mode, uid, gid, context):
    """Create a new object under *path* in *bucket_name*.

    Validates the path, checks that the object does not already exist,
    verifies the parent is a directory-like object the caller may
    access, then creates the object's metadata and links it into the
    parent's first segment.

    Returns the new object id on success, or an OF error code
    (EACCESS / ENOENT) on failure.

    NOTE(review): *mode* and *context* (beyond the access check) are
    currently unused; kept for interface compatibility.
    """
    self._LOGGER.info("Create object")
    # Reject the root itself, relative paths, and normalize a trailing '/'.
    if path == '/':
        return OF.EACCESS
    if path[0] != '/':
        return OF.EACCESS
    if path[-1] == '/':
        path = path[:-1]
    s_pos = path.rfind('/')
    # Check exist: creating over an existing object is refused.
    objId = self.lookup(bucket_name, path)
    if objId:
        return OF.EACCESS
    # Split path into parent directory and leaf name.
    self._LOGGER.info("Splitting path object")
    parent_path = path[:s_pos]
    object_name = path[s_pos + 1:]
    self._LOGGER.debug('parent_path : %s' % parent_path)
    self._LOGGER.debug('object_name : %s' % object_name)
    # FIXME CHECK NAME
    # ........
    # Check permission on the parent.
    idFather = self.access(bucket_name, parent_path, context)
    if idFather == OF.EACCESS or idFather == OF.ENOENT:
        return idFather
    fatherObj = self.metaObj.getProperty(idFather, OF.PROPERTY_OBJECT)
    # Only directory-like parents (not files or mounts) may hold children.
    if fatherObj.object_type == OF.TYPE_FILE or fatherObj.object_type == OF.TYPE_MOUNT:
        self._LOGGER.warning('The parent object is not a Directory or a Root')
        return OF.EACCESS
    idObj = self._createObjectKey(bucket_name, object_name, idFather)
    obj = ObjectProperties()
    obj.id = idObj
    obj.uid = uid
    # BUGFIX: gid was accepted but never stored; record it alongside uid.
    obj.gid = gid
    obj.object_type = object_type
    obj.bucket_name = bucket_name
    obj.object_name = object_name
    obj.id_parent = idFather
    self.metaObj.create(obj.id, obj)
    # Link to parent: register the child in the parent's segment 0.
    seg = self.metaObj.getSegment(idFather, 0)
    seg[obj.object_name] = obj.id
    self.metaObj.setSegment(obj.id_parent, 0, seg)
    return idObj
def getObjectList(self, bucket_name, prefix, marker, max_keys, terse=0):
    """List objects of a bucket, S3-style.

    Scans the bucket's on-disk layout, strips the hashed directory
    prefix from each name, then applies *marker* (start after this key)
    and *prefix* (only keys starting with it) filters, truncating at
    *max_keys*. When *terse* is falsy, full properties are loaded for
    each object; otherwise only name stubs are returned.

    Returns an ObjectQueryResult. Raises web.HTTPError(404) when the
    bucket directory does not exist.
    """
    path = self._getBucketPath(bucket_name)
    if not os.path.isdir(path):
        raise web.HTTPError(404)  # FIXME
    object_names = []
    # 'entry' instead of 'dir' — avoid shadowing the builtin dir().
    for entry in os.listdir(path):
        new_path = os.path.join(path, entry)
        if os.path.isdir(new_path):
            self._findObjects(new_path, object_names)
    # Strip the bucket path plus the hashed sub-directory components
    # ("xx/" per depth level) from each collected name.
    skip = len(path) + 1
    for i in range(self._BUCKET_DEPTH):
        skip += 2 * (i + 1) + 1
    object_names = [n[skip:] for n in object_names]
    object_names.sort()
    contents = []
    # FIXME move filter on file scan
    start_pos = 0
    if marker:
        start_pos = bisect.bisect_right(object_names, marker, start_pos)
    if prefix:
        start_pos = bisect.bisect_left(object_names, prefix, start_pos)
    truncated = False
    for object_name in object_names[start_pos:]:
        # BUGFIX: guard the startswith call — a None prefix used to raise
        # TypeError here; '' keeps matching everything as before.
        if prefix and not object_name.startswith(prefix):
            break
        if len(contents) >= max_keys:
            truncated = True
            break
        obj = ObjectProperties(bucket_name=bucket_name, object_name=object_name)
        if not terse:
            obj = self.getObjectProperties(bucket_name, object_name)
        contents.append(obj)
        marker = object_name
    res = ObjectQueryResult(bucket_name, prefix, marker, max_keys, truncated)
    res.setObjectList(contents)
    return res