def getObjectACP(self, bucket_name, object_name):
    """Load an object's access-control policy from its ``acp.xml`` file.

    Returns an ``ACP`` built from the stored grants, or ``None`` when no
    ACP file exists for the object.
    """
    object_path = self._getObjectPath(bucket_name, object_name)
    path = os.path.join(object_path, "acp.xml")
    if not os.path.exists(path):
        return None
    data = self._readFile(path)
    acp_dict = XmlToDict(data).getDict()
    grant_dicts = acp_dict['ACP']['Grant']
    # The XML-to-dict conversion collapses a single <Grant> element into a
    # bare dict instead of a one-element list; normalize so one loop
    # handles both shapes.
    if isinstance(grant_dicts, dict):
        grant_dicts = [grant_dicts]
    grantList = []
    for grant_dict in grant_dicts:
        grant = Grant()
        grant.setByDict(grant_dict)
        grantList.append(grant)
    acp = ACP()
    acp.setByGrants(grantList)
    return acp
def putBucketACL(self, bucket_name, user, session, grantList):
    """Replace the ACL on *bucket_name* with one built from *grantList*.

    The ``user`` and ``session`` arguments are currently unused here.
    """
    self._LOGGER.info("PUT bucket-Acl")
    # NOTE(review): an empty context is passed through to the metadata
    # service — confirm this is intentional.
    ctx = ''
    bucket_acp = ACP()
    bucket_acp.setByGrants(grantList)
    self.meta.setBucketACL(bucket_name, bucket_acp, ctx)
    return
def putBucketACL(self, bucket_name, user, session, grantList):
    """Store a new access-control list on the given bucket.

    Builds an ``ACP`` from *grantList* and hands it to the metadata
    service; ``user`` and ``session`` are accepted but not used here.
    """
    self._LOGGER.info("PUT bucket-Acl")
    ctx = ''  # NOTE(review): empty context — verify downstream expectation
    new_acp = ACP()
    new_acp.setByGrants(grantList)
    self.meta.setBucketACL(bucket_name, new_acp, ctx)
    return
def putObjectACL(self, bucket_name, object_name, user, session, grantList):
    """Replace the ACL on an object with one built from *grantList*.

    Resolves the object through the metadata service's access check,
    then stores the new ACP against the resolved object id.
    """
    self._LOGGER.info("PUT Object ACL")
    ctx = Context(user, '', session, [])
    # ACCESS OR LOOKUP ?  (original open question — access() both checks
    # permission and resolves the object id)
    object_id = self.meta.access(bucket_name, object_name, ctx)
    new_acp = ACP()
    new_acp.setByGrants(grantList)
    self.meta.setObjectACL(object_id, new_acp, ctx)
    return
def _putObject(self):
    """Create the object named in the current S3 request.

    Reads storage class and content type from the authenticated request
    headers, builds an owner-only ACP, stores the object through the
    object service, and returns the response headers (the object's ETag).
    """
    self._LOGGER.info("creating the object")
    self._LOGGER.debug("Bucket-name : %s" % self.s3Request.BUCKET)
    self._LOGGER.debug("Object-name : %s" % self.s3Request.OBJECT)
    s3_headers = self.s3Request._AUTH_S3_HEADERS
    # Optional storage class header; default to STANDARD as S3 does.
    storage_class = s3_headers.get("x-amz-storage-class", "STANDARD")
    # The x-amz-acl header is not honoured yet: every object is private.
    object_acl = S3ACP.AMZ_PRIVATE
    # if "x-amz-acl" in s3_headers:
    #     object_acl = s3_headers["x-amz-acl"]
    self._LOGGER.debug("Object acl : %s" % object_acl)
    content_type = s3_headers.get("content-type")
    self._LOGGER.debug("Object content type : %s" % content_type)
    meta = self.s3Request.getMetaData()
    # Grant the requesting user the permission mapped from the canned ACL.
    acp = ACP()
    grant = Grant()
    # FIXME user amzToID
    grant.uid = self.getUser().uid
    grant.permission = ACPHelper.amzToPerm(object_acl)
    grantList = [grant]
    acp.setByGrants(grantList)
    obj = self.application.objectSrv.addObject(
        self.s3Request.BUCKET,
        self.s3Request.OBJECT,
        self.getUser(),
        self.s3Request.ID_REQUEST,
        storage_class,
        acp,
        content_type,
        meta,
        self.getBody(),
    )
    headers = {}
    headers["ETag"] = obj.md5
    return headers
def _putObject(self):
    """Create the object named in the current S3 request and return the
    response headers (currently just the new object's ETag).

    Storage class and content type come from the authenticated request
    headers; the ACL is forced to private for now (see commented check).
    """
    self._LOGGER.info("creating the object")
    self._LOGGER.debug('Bucket-name : %s' % self.s3Request.BUCKET)
    self._LOGGER.debug('Object-name : %s' % self.s3Request.OBJECT)
    s3_headers = self.s3Request._AUTH_S3_HEADERS
    # dict.get replaces the deprecated has_key and the double lookup.
    storage_class = s3_headers.get('x-amz-storage-class', "STANDARD")
    object_acl = S3ACP.AMZ_PRIVATE
    #if 'x-amz-acl' in s3_headers:
    #    object_acl = s3_headers['x-amz-acl']
    self._LOGGER.debug('Object acl : %s' % object_acl)
    content_type = s3_headers.get('content-type')
    self._LOGGER.debug('Object content type : %s' % content_type)
    meta = self.s3Request.getMetaData()
    # Build an ACP granting the requesting user the permission mapped
    # from the (fixed) canned ACL.
    acp = ACP()
    grant = Grant()
    #FIXME user amzToID
    grant.uid = self.getUser().uid
    grant.permission = ACPHelper.amzToPerm(object_acl)
    grantList = [grant]
    acp.setByGrants(grantList)
    obj = self.application.objectSrv.addObject(self.s3Request.BUCKET,
                                               self.s3Request.OBJECT,
                                               self.getUser(),
                                               self.s3Request.ID_REQUEST,
                                               storage_class,
                                               acp,
                                               content_type,
                                               meta,
                                               self.getBody())
    headers = {}
    headers["ETag"] = obj.md5
    return headers
def write(self, path, data_handler, offset, fh): print '#################' print '##### WRITE ###' print '#################' uid, gid, context = fuse_get_context() xattr = self.listxattr(path) object_acl = S3ACP.ACL_FULL_CONTROL acp = ACP() grant = Grant() grant.uid = uid grant.permission = ACPHelper.s3AclToPerm(object_acl) grantList = [grant] acp.setByGrants(grantList) storage_class="STANDARD" content_type=None self.cache.write(options.root_bucket, path, uid, gid, context, storage_class, object_acl ,content_type, xattr, data_handler) self.data[path] = self.data[path][:offset] + data_handler self.files[path]['st_size'] = len(self.data[path]) return len(data_handler)
def putBucket(self, bucket_name, user, session, amz, location):
    """Create a new bucket for *user*.

    Validates privileges, name uniqueness, the per-user bucket quota and
    the requested region, then registers the bucket with the resource
    manager, metadata service and storage backend, and finally sets it
    online (status 2).  Raises ``RestFSError`` on any validation failure.
    """
    self._LOGGER.info("Creating the bucket")
    self._LOGGER.debug('Bucket-name : %s' % bucket_name)
    ctx = ''
    # NOTE(review): this checks user.id while the rest of the method uses
    # user.uid — confirm both attributes exist and which is authoritative.
    if user.id == 0:
        self._LOGGER.warning(
            'The user have not privileges to create the bucket')
        raise RestFSError(errCode.err['AuthorizationDeny']['code'],
                          errCode.err['AuthorizationDeny']['message'],
                          errCode.err['AuthorizationDeny']['http'])
    existing = self.res.findBucket(bucket_name)
    if existing > 0:
        self._LOGGER.warning(
            'The bucket you have tried to create already exists')
        raise RestFSError(errCode.err['BucketAlreadyExists']['code'],
                          errCode.err['BucketAlreadyExists']['message'],
                          errCode.err['BucketAlreadyExists']['http'])
    # Enforce the per-user bucket quota.
    owned = self.res.getCountBucketByOwner(user.uid)
    self._LOGGER.debug('Bucket-number created : %s' % owned)
    bucket_limit = User.max_buckets
    self._LOGGER.debug('Bucket-number max the user can create : %s'
                       % bucket_limit)
    if owned > bucket_limit:
        self._LOGGER.warning(
            'The user has reached the limit of the buckets to create')
        raise RestFSError(errCode.err['LimitBucketsReached']['code'],
                          errCode.err['LimitBucketsReached']['message'],
                          errCode.err['LimitBucketsReached']['http'])
    # Check policy.  FIX: this branch is unreachable — `existing > 0`
    # already raised BucketAlreadyExists above; a real policy check is
    # still to be written.
    if existing > 0:
        self._LOGGER.warning('The policy document specified is invalid')
        raise RestFSError(errCode.err['InvalidPolicyDocument']['code'],
                          errCode.err['InvalidPolicyDocument']['message'],
                          errCode.err['InvalidPolicyDocument']['http'])
    # Check location; default to the EU region when none is given.
    regions = self.res.getRegionList()
    if not location:
        location = "EU"
    elif location not in regions:
        self._LOGGER.warning(
            'The region/location the user specified was not found')
        raise RestFSError(errCode.err['RegionNotFound']['code'],
                          errCode.err['RegionNotFound']['message'],
                          errCode.err['RegionNotFound']['http'])
    # Find cluster / set administration privileges: not implemented yet.
    # Register the new bucket with the resource manager.
    self.res.addBucket(bucket_name, user.uid)
    # Convert the S3 canned ACL into a RestFS ACP for the owner.
    owner_acp = ACP()
    owner_grant = Grant()
    owner_grant.permission = ACPHelper.amzToPerm(amz)
    owner_grant.uid = user.uid
    owner_acp.setByGrants([owner_grant])
    # FIX ME: group id is not handled yet.
    gid = ''
    self.meta.createBucket(bucket_name, owner_acp, location, user.uid,
                           gid, ctx)
    # Storage operation.
    self.storage.createBucket(bucket_name, ctx)
    # Close the transaction: status 2 puts the bucket online.
    self.res.setBucketStatus(bucket_name, 2, ctx)
def putBucket(self, bucket_name, user, session, amz, location):
    """Create the bucket *bucket_name* owned by *user*.

    Performs privilege, uniqueness, quota and region validation (raising
    ``RestFSError`` on failure), then records the bucket in the resource
    manager, metadata service and storage layer before marking it online.
    """
    self._LOGGER.info("Creating the bucket")
    self._LOGGER.debug('Bucket-name : %s' % bucket_name)
    ctx = ''
    # Reject the anonymous/system user (id 0).
    # NOTE(review): user.id here vs user.uid below — confirm intended.
    if user.id == 0:
        self._LOGGER.warning(
            'The user have not privileges to create the bucket')
        raise RestFSError(errCode.err['AuthorizationDeny']['code'],
                          errCode.err['AuthorizationDeny']['message'],
                          errCode.err['AuthorizationDeny']['http'])
    found = self.res.findBucket(bucket_name)
    if found > 0:
        self._LOGGER.warning(
            'The bucket you have tried to create already exists')
        raise RestFSError(errCode.err['BucketAlreadyExists']['code'],
                          errCode.err['BucketAlreadyExists']['message'],
                          errCode.err['BucketAlreadyExists']['http'])
    # How many buckets may this user still create?
    created = self.res.getCountBucketByOwner(user.uid)
    self._LOGGER.debug('Bucket-number created : %s' % created)
    quota = User.max_buckets
    self._LOGGER.debug('Bucket-number max the user can create : %s' % quota)
    if created > quota:
        self._LOGGER.warning(
            'The user has reached the limit of the buckets to create')
        raise RestFSError(errCode.err['LimitBucketsReached']['code'],
                          errCode.err['LimitBucketsReached']['message'],
                          errCode.err['LimitBucketsReached']['http'])
    # Policy check placeholder.  FIX: unreachable — `found > 0` already
    # raised above; the real policy validation is still missing.
    if found > 0:
        self._LOGGER.warning('The policy document specified is invalid')
        raise RestFSError(errCode.err['InvalidPolicyDocument']['code'],
                          errCode.err['InvalidPolicyDocument']['message'],
                          errCode.err['InvalidPolicyDocument']['http'])
    # Validate the requested region, defaulting to EU.
    known_regions = self.res.getRegionList()
    if not location:
        location = "EU"
    elif location not in known_regions:
        self._LOGGER.warning(
            'The region/location the user specified was not found')
        raise RestFSError(errCode.err['RegionNotFound']['code'],
                          errCode.err['RegionNotFound']['message'],
                          errCode.err['RegionNotFound']['http'])
    # Find cluster / set administration privileges: not implemented.
    # Register the new bucket with the resource manager.
    self.res.addBucket(bucket_name, user.uid)
    # Convert the S3 canned ACL to a RestFS ACP for the owning user.
    acp = ACP()
    grant = Grant()
    grant.permission = ACPHelper.amzToPerm(amz)
    grant.uid = user.uid
    acp.setByGrants([grant])
    # FIX ME: group id handling is missing.
    gid = ''
    self.meta.createBucket(bucket_name, acp, location, user.uid, gid, ctx)
    # Storage operation.
    self.storage.createBucket(bucket_name, ctx)
    # Close the transaction: put the bucket online (status 2).
    self.res.setBucketStatus(bucket_name, 2, ctx)