def destroyObject(bucket, key):
    '''Delete an object: remove its database rows, then its backing file
    and any hash directories left empty by the removal.

    Raises NoSuchKeyException when the object does not exist in the bucket.
    The database work is transactional (rolled back via cancelAndClose on
    any failure); the filesystem removal happens only after a clean commit.'''
    conn = Connection()
    try:
        # The bucket must exist (and be valid) before touching object tables.
        _verifyBucket(conn, bucket, True)
        bucket_key_params = (escape_string(str(bucket)), escape_string(str(key)))
        # Look up the hashfield first -- it names the file on disk.
        rows = conn.executeStatement(
            "SELECT hashfield FROM object WHERE bucket = %s AND object = %s",
            bucket_key_params)
        if not rows:
            raise NotFoundException.NoSuchKeyException(bucket, key)
        # Remove the metadata rows first, then the object row itself.
        for statement in ("DELETE FROM object_metadata WHERE bucket = %s AND object = %s",
                          "DELETE FROM object WHERE bucket = %s AND object = %s"):
            conn.executeStatement(statement, bucket_key_params)
    except:
        # Roll back and close on any failure, then propagate.
        conn.cancelAndClose()
        raise
    conn.close()
    # Files are sharded under <fs_path><bucket>/<h[0:3]>/<h[3:6]>/<h[6:9]>/<h>.
    object_hash = rows[0][0]
    shard_dir = Config.get('common','filesystem_path')
    shard_dir += str(bucket)
    shard_dir += "/"+object_hash[0:3]+"/"+object_hash[3:6]+"/"+object_hash[6:9]
    os.remove(shard_dir+"/"+object_hash)
    try:
        # Prune the shard directories if this was their last file; a
        # non-empty directory is expected and not an error.
        os.removedirs(shard_dir)
    except OSError as e:
        if e.errno != errno.ENOTEMPTY:
            raise
def getObject(bucket, key, getMetadata, getData, byteRangeStart = None, byteRangeEnd = None, ifMatch = None, ifNotMatch = None, ifModifiedSince = None, ifNotModifiedSince = None, ifRange = None):
    '''Return a dict describing an object, optionally including its
    user metadata and its file data (with byte-range support).

    Raises NoSuchKeyException when the object does not exist in the bucket.
    The if* precondition parameters are currently unused (see the
    commented-out _passPrecondition call below).

    Returned keys: key, bucket, hash, creationTime, eTag, lastModified,
    size, content-type, owner; plus content-encoding/content-disposition
    when set, content-range when a byte range was served, metadata when
    getMetadata is true, and data when getData is true.

    NOTE(review): when getData is false, 'size' is returned as 0 even
    though the database size column (result[6]) is available -- callers
    appear to rely on this, so it is left unchanged.'''
    conn = Connection()
    try:
        #Validate the bucket
        _verifyBucket(conn, bucket, True)
        #Check for object and get information from database
        query = "SELECT o.object, o.bucket, o.hashfield, o.object_create_time, o.eTag, o.object_mod_time, o.size, o.content_type, o.content_encoding, o.content_disposition, o.userid, u.username FROM object as o, user as u WHERE o.bucket = %s AND o.object = %s AND o.userid = u.userid"
        result = conn.executeStatement(query, (escape_string(str(bucket)), escape_string(str(key))))
        if len(result) == 0:
            raise NotFoundException.NoSuchKeyException(bucket, key)
        result = result[0]
        #if _passPrecondition(str(result[4]), str(result[5]), str(ifMatch), str(ifNotMatch), str(ifModifiedSince), str(ifNotModifiedSince), str(ifRange)) == False:
        #    byteRangeStart = None
        #    byteRangeEnd = None
        #Get metadata from database
        query = "SELECT type, value FROM object_metadata WHERE bucket = %s AND object = %s"
        metadata = conn.executeStatement(query, (escape_string(str(bucket)), escape_string(str(key))))
    except:
        # Roll back and close on any failure, then propagate.
        conn.cancelAndClose()
        raise
    else:
        conn.close()
    # Metadata values are stored UTF-8 encoded; decode for the caller.
    metadataDict = {}
    for tag in metadata:
        metadataDict[str(tag[0])] = unicode(tag[1], encoding='utf8')
    content_range = {}
    size = 0
    hashfield = str(result[2])
    if getData:
        #Get data from filesystem and build content_range
        # Files are sharded under <fs_path><bucket>/<h[0:3]>/<h[3:6]>/<h[6:9]>/<h>.
        path = Config.get('common','filesystem_path')
        path += str(bucket)
        path += "/"+hashfield[0:3]+"/"+hashfield[3:6]+"/"+hashfield[6:9]+"/"+hashfield
        fileReader = open(path, 'rb')
        try:
            data = ""
            if byteRangeStart is not None and byteRangeStart > 0:
                fileReader.seek(byteRangeStart)
                content_range['start'] = byteRangeStart
                if byteRangeEnd is not None and byteRangeEnd > byteRangeStart:
                    # Read [start, end); the trailing read() advances to EOF
                    # so tell() yields the file's total size.
                    data = fileReader.read(byteRangeEnd-byteRangeStart)
                    content_range['end'] = fileReader.tell()
                    fileReader.read()
                    content_range['total'] = fileReader.tell()
                    size = byteRangeEnd-byteRangeStart
                else:
                    # No (valid) end: serve from start to EOF.
                    data = fileReader.read()
                    content_range['end'] = fileReader.tell()
                    content_range['total'] = fileReader.tell()
                    # BUG FIX: size is the number of bytes served, not the
                    # whole file size (every other branch sets size == len(data)).
                    size = content_range['total'] - byteRangeStart
            else:
                if byteRangeEnd is not None:
                    # Range starting at 0: serve the first byteRangeEnd bytes.
                    content_range['start'] = 0
                    data = fileReader.read(byteRangeEnd)
                    content_range['end'] = fileReader.tell()
                    fileReader.read()
                    content_range['total'] = fileReader.tell()
                    size = byteRangeEnd
                else:
                    # No range at all: serve the whole file.
                    data = fileReader.read()
                    size = fileReader.tell()
        finally:
            fileReader.close()
        #print data
    if 'start' in content_range:
        # NOTE(review): 'end'/'total' come from tell(), so 'end' is an
        # exclusive offset -- confirm against the HTTP layer's expectations.
        content_range['string'] = str(content_range['start'])+"-"+str(content_range['end'])+"/"+str(content_range['total'])
    returnDict = {'key':str(result[0]), 'bucket':str(result[1]), 'hash':hashfield, 'creationTime':((result[3]).isoformat('T') + 'Z'), 'eTag':str(result[4]), 'lastModified':((result[5]).isoformat('T') + 'Z'), 'size':size, 'content-type':str(result[7]), 'owner':{'id':int(result[10]), 'name':unicode(result[11], encoding='utf8')}}
    # Optional headers are stored as "" when absent; include only real values.
    if str(result[8]) != "" and result[8] is not None:
        returnDict['content-encoding'] = str(result[8])
    if str(result[9]) != "" and result[9] is not None:
        returnDict['content-disposition'] = str(result[9])
    if 'string' in content_range:
        returnDict['content-range'] = content_range['string']
    if getMetadata:
        returnDict['metadata'] = metadataDict
    if getData:
        returnDict['data'] = data
    return returnDict
def setObject(userid, bucket, key, metadata, data, content_md5 = None, content_type = None, content_disposition = None, content_encoding = None): '''setObject''' if not userid: userid = 1 hashString = None conn = Connection() try: #Validate the bucket _verifyBucket(conn, bucket, userid, True) #Check for object and get information from database calculatedMD5 = md5.new(data) calculatedMD5HexDigest = calculatedMD5.hexdigest() if content_md5 != None and content_md5 != calculatedMD5HexDigest: raise BadRequestException.BadDigestException(content_md5, calculatedMD5HexDigest) #Generate hashfield hashfield = hashlib.sha1() hashfield.update(key) hashfieldHexDigest = '' success = False query = "SELECT COUNT(*) FROM object WHERE hashfield = %s" attemptedHashfieldList = [] for i in range(3): hashfield.update(str(time.time())) hashfieldHexDigest = hashfield.hexdigest() attemptedHashfieldList.append(str(hashfieldHexDigest)) count = conn.executeStatement(query, (str(hashfieldHexDigest)))[0][0] if count == 0: success = True break if success == False: raise InternalErrorException.HashfieldCollisionErrorException(attemptedHashfieldList) #Get size of file size = len(data) if content_type == None: content_type = "binary/octet-stream" if content_encoding == None: content_encoding = "" if content_disposition == None: content_disposition = "" #Build metadata query metadataQuery = "" if metadata != None and metadata != {}: metadataQuery = "INSERT INTO object_metadata (bucket, object, type, value) VALUES ("+"'" for tag, value in metadata.iteritems(): if type(value) == str or type(value) == unicode: value = value.encode('utf8') else: value = str(value) metadataQuery += escape_string(str(bucket))+"', '"+escape_string(str(key))+"', '"+escape_string(tag)+"', '"+escape_string(value)+"'), ('" metadataQuery = metadataQuery[0:-4] #Write to database and filesystem result = conn.executeStatement("SELECT hashfield FROM object WHERE bucket = %s AND object = %s", (escape_string(str(bucket)), 
escape_string(str(key)))) if len(result) > 0: hashString = result[0][0] path = Config.get('common','filesystem_path') path += str(bucket) path += "/"+hashString[0:3]+"/"+hashString[3:6]+"/"+hashString[6:9] os.remove(path+"/"+hashString) try: os.removedirs(path) except OSError, e: if e.errno != errno.ENOTEMPTY: raise hashString = str(hashfieldHexDigest) query = "UPDATE object SET userid = %s, hashfield = %s, eTag = %s, object_mod_time = NOW(), size = %s, content_type = %s, content_encoding = %s, content_disposition = %s WHERE bucket = %s AND object = %s" conn.executeStatement(query, (int(userid), hashString, str(calculatedMD5HexDigest), int(size), escape_string(str(content_type)), escape_string(str(content_encoding)), escape_string(str(content_disposition)), escape_string(str(bucket)), escape_string(str(key)))) conn.executeStatement("DELETE FROM object_metadata WHERE bucket = %s AND object = %s", (escape_string(str(bucket)), escape_string(str(key)))) else: