# Example 1
    # Resend only the chunks the server still expects (their ids remain in
    # chunkIds); skip any chunk we have already sent, and for the rest send
    # each object that is not already cached on the server.
    for chunk in chunks:
        if chunk['ChunkId'] not in chunkIds:
            continue

        chunkIds.remove(chunk['ChunkId'])
        for obj in chunk['ObjectList']:
            # it is possible that if we start resending a chunk, due to the program crashing, that
            # some objects will already be in cache.  Check to make sure that they are not, and then
            # send the object to Spectra S3
            if obj['InCache'] == 'false':
                # real_file_name lets the SDK open and read the local file
                # itself, so no stream argument is passed here
                realFileName = "resources/" + obj['Name']
                client.put_object(
                    ds3.PutObjectRequest(bucketName,
                                         obj['Name'],
                                         offset=int(obj['Offset']),
                                         job=bulkResult.result['JobId'],
                                         real_file_name=realFileName))

# we now verify that all our objects have been sent to DS3
bucketResponse = client.get_bucket(ds3.GetBucketRequest(bucketName))

for obj in bucketResponse.result['ContentsList']:
    # print() call form: Python 3 compatible and consistent with the
    # other examples in this file (the bare print statement was Python 2 only)
    print(obj['Key'])

# delete the bucket by first deleting all the objects, and then deleting the bucket
for obj in bucketResponse.result['ContentsList']:
    client.delete_object(ds3.DeleteObjectRequest(bucketName, obj['Key']))

client.delete_bucket(ds3.DeleteBucketRequest(bucketName))
# Example 2

# get the sizes for each file
fileList = ds3.FileObjectList(list(map(createDs3Obj, fileList)))

# submit the put bulk request to DS3
bulkResult = client.put_bulk_job_spectra_s3(
    ds3.PutBulkJobSpectraS3Request(bucketName, fileList))

# the bulk request will split the files over several chunks if it needs to
# we need to iterate over the chunks, ask the server for space to send
# the chunks, then send all the objects returned in the chunk
for chunk in bulkResult.result['ObjectsList']:
    allocateChunk = client.allocate_job_chunk_spectra_s3(
        ds3.AllocateJobChunkSpectraS3Request(chunk['ChunkId']))
    for obj in allocateChunk.result['ObjectList']:
        # 'with' guarantees the file handle is closed even if put_object
        # raises (the original leaked one handle per object)
        with open(fileMap[obj['Name']], "rb") as objectDataStream:
            # position the stream at this chunk's offset so the correct
            # byte range is sent when an object spans multiple chunks
            # (matches the pattern used in Example 3)
            objectDataStream.seek(int(obj['Offset']), 0)
            client.put_object(
                ds3.PutObjectRequest(bucketName,
                                     obj['Name'],
                                     obj['Length'],
                                     objectDataStream,
                                     offset=int(obj['Offset']),
                                     job=bulkResult.result['JobId']))

# we now verify that all our objects have been sent to DS3
bucketResponse = client.get_bucket(ds3.GetBucketRequest(bucketName))

for obj in bucketResponse.result['ContentsList']:
    print(obj['Key'])
# Example 3
    # Store the object under a "prefix/" pseudo-folder on the server, and
    # remember which local file it came from so the upload loop below can
    # reopen it by DS3 object name.
    ds3ObjName = "prefix/" + fileName
    fileMap[ds3ObjName] = fileName
    return ds3.Ds3PutObject(ds3ObjName, size)

# get the sizes for each file
objectList = list(map(createDs3PutObject, fileList))

# submit the put bulk request to DS3
bulkResult = client.put_bulk_job_spectra_s3(ds3.PutBulkJobSpectraS3Request(bucketName, objectList))

# the bulk request will split the files over several chunks if it needs to
# we need to iterate over the chunks, ask the server for space to send
# the chunks, then send all the objects returned in the chunk
for chunk in bulkResult.result['ObjectsList']:
    allocateChunk = client.allocate_job_chunk_spectra_s3(ds3.AllocateJobChunkSpectraS3Request(chunk['ChunkId']))
    for obj in allocateChunk.result['ObjectList']:
        # 'with' guarantees the file handle is closed even if put_object
        # raises (the original leaked one handle per object)
        with open(fileMap[obj['Name']], "rb") as objectDataStream:
            # start reading at this chunk's offset within the file
            objectDataStream.seek(int(obj['Offset']), 0)
            client.put_object(ds3.PutObjectRequest(bucket_name=bucketName,
                                                   object_name=obj['Name'],
                                                   length=obj['Length'],
                                                   stream=objectDataStream,
                                                   offset=int(obj['Offset']),
                                                   job=bulkResult.result['JobId']))

# we now verify that all our objects have been sent to DS3
bucketResponse = client.get_bucket(ds3.GetBucketRequest(bucketName))

for obj in bucketResponse.result['ContentsList']:
    print(obj['Key'])