-
Notifications
You must be signed in to change notification settings - Fork 0
/
s3copy.py
67 lines (53 loc) · 1.97 KB
/
s3copy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from Queue import LifoQueue
import threading
# AWS credentials for the SOURCE account (read access to srcBucketName).
# NOTE(review): redacted placeholders -- fill in before running, or better,
# load from environment/IAM instead of hard-coding.
source_aws_key = '*******************'
source_aws_secret_key = '*******************'
# AWS credentials for the DESTINATION account (write access to dstBucketName).
dest_aws_key = '*******************'
dest_aws_secret_key = '*******************'
# Bucket to copy FROM and bucket to copy TO.
srcBucketName = '*******************'
dstBucketName = '*******************'
class Worker(threading.Thread):
    """Daemon worker thread: pops S3 key names off a shared queue and copies
    each object from the source bucket to the destination bucket, skipping
    objects whose etag already matches.
    """

    def __init__(self, queue):
        threading.Thread.__init__(self)
        # Each worker opens its own connections -- boto connection objects
        # are not safe to share across threads.
        self.source_conn = S3Connection(source_aws_key, source_aws_secret_key)
        self.dest_conn = S3Connection(dest_aws_key, dest_aws_secret_key)
        self.srcBucket = self.source_conn.get_bucket(srcBucketName)
        self.dstBucket = self.dest_conn.get_bucket(dstBucketName)
        self.queue = queue

    def run(self):
        # Loop forever; threads are daemons, so they die with the process
        # once copyBucket()'s q.join() returns.
        while True:
            key_name = self.queue.get()
            try:
                # BUG FIX: the original built local stubs with
                # Key(bucket, name), whose .etag and .storage_class are None
                # until fetched -- so the etag comparison was None != None
                # and changed objects were never re-copied. get_key() issues
                # a HEAD request and returns a fully populated key, or None
                # if the object does not exist.
                src_key = self.srcBucket.get_key(key_name)
                dst_key = self.dstBucket.get_key(key_name)
                if src_key is None:
                    # Object vanished between listing and copy; skip it.
                    print('gone from source, skipping: ' + key_name)
                elif dst_key is None or src_key.etag != dst_key.etag:
                    print('copy: ' + key_name)
                    # Server-side copy, preserving the source storage class.
                    self.dstBucket.copy_key(key_name, srcBucketName, key_name,
                                            storage_class=src_key.storage_class)
                else:
                    print('exists and etag matches: ' + key_name)
            finally:
                # BUG FIX: always acknowledge the item, even on error, so
                # copyBucket()'s q.join() cannot hang forever.
                self.queue.task_done()
def copyBucket(maxKeys=1000):
    """Copy every object from srcBucketName to dstBucketName.

    Lists the source bucket page by page and feeds key names to a pool of
    10 daemon Worker threads through a bounded queue.

    maxKeys: listing page size (S3 caps a single listing at 1000).
    """
    print('start')
    s_conn = S3Connection(source_aws_key, source_aws_secret_key)
    srcBucket = s_conn.get_bucket(srcBucketName)

    # Bounded queue applies back-pressure: q.put() blocks once 5000 names
    # are waiting, keeping memory flat on very large buckets.
    q = LifoQueue(maxsize=5000)
    for _ in range(10):
        print('adding worker')
        t = Worker(q)
        t.daemon = True  # workers must not keep the process alive
        t.start()

    # Page through the bucket listing using the marker protocol: each page
    # starts after the last key name of the previous page.
    resultMarker = ''
    while True:
        # BUG FIX: the original message hard-coded "1000" even though the
        # page size is the maxKeys parameter.
        print('fetch next %i, backlog currently at %i' % (maxKeys, q.qsize()))
        keys = srcBucket.get_all_keys(max_keys=maxKeys, marker=resultMarker)
        for k in keys:
            q.put(k.key)
        if len(keys) < maxKeys:
            # A short page means the listing is exhausted.
            print('Done')
            break
        # BUG FIX: the original indexed keys[maxKeys - 1], which raises
        # IndexError if a page is ever shorter than maxKeys yet non-final;
        # the marker protocol simply needs the last key of this page.
        resultMarker = keys[-1].key

    # Block until every queued key has been task_done()'d by the workers.
    q.join()
    print('done')
# Script entry point: run the full bucket copy when executed directly.
if __name__ == "__main__":
    copyBucket()