Example #1
import fnmatch

import boto


def main():
    s3 = boto.connect_s3()

    # List every bucket owned by this account.
    buckets = s3.get_all_buckets()

    bucket_names = []
    for bucket in buckets:
        print bucket.name
        bucket_names.append(bucket.name)

    bucket_pattern = raw_input(
        "Which bucket would you like to delete? (accepts glob syntax): ")

    # Match the glob pattern against all bucket names.
    buckets_to_delete = fnmatch.filter(bucket_names, bucket_pattern)

    print "buckets_to_delete:"
    print buckets_to_delete

    confirm = raw_input("Are you sure you want to delete these buckets? (y/n): ")
    if confirm != "y":
        return

    for bucket_name in buckets_to_delete:
        bucket = s3.get_bucket(bucket_name)
        # CHECK and LOG are assertion/logging helpers from the surrounding project.
        CHECK(bucket)
        LOG(INFO, "bucket %s" % (bucket_name))
        LOG(INFO, "listing contents of bucket...")
        all_keys = [key.name for key in bucket.list()]
        LOG(INFO, "deleting contents of bucket...")
        # An S3 bucket must be emptied before it can be deleted.
        bucket.delete_keys(all_keys)
        LOG(INFO, "deleting bucket...")
        s3.delete_bucket(bucket_name)

    return
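The CHECK, LOG, and INFO names above come from the surrounding project, not from boto. A minimal sketch of stand-in definitions, assuming glog-style semantics (CHECK aborts on a falsy value, LOG prints a severity-tagged message), would let the snippet run on its own:

import sys

INFO = 'INFO'


def CHECK(condition, message='check failed'):
    # Abort loudly if the condition is falsy, mirroring glog-style CHECK().
    if not condition:
        sys.exit(message)


def LOG(level, message):
    # Print a severity-tagged message to stderr.
    print >> sys.stderr, '[%s] %s' % (level, message)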
Example #2
import boto

# py_pert, py_base, iwutil, and iw_pb2 are project-specific modules
# (PERT table reader, key helpers, progress bars, and image protobufs).
import iw_pb2
import iwutil
import py_base
import py_pert


def main():
    dataset_name = 'tide_v08'  # only referenced by the commented-out URIs below
    # Target output sizes, expressed as total pixel area.
    sizes = {}
    sizes['thumbnail'] = 100 * 100
    sizes['small'] = 640 * 480
    reset_bucket = False

    #dataset_base_uri = 'local://home/ubuntu/Desktop/vol-0449ca74/itergraph/%s/' % (dataset_name)
    #images_uri = '%s/cropped_scaled_photoid_to_image.pert' % (dataset_base_uri)
    images_uri = 'local://home/ubuntu/Desktop/vol-0449ca74/itergraph/tide_v14/cropped_scaled_photoid_to_image_randomaccess.pert'
    bucket_name = 'tide_image_cache'
    s3 = boto.connect_s3()

    # create_bucket() returns the existing bucket if this account already owns it.
    bucket = s3.create_bucket(bucket_name)
    if reset_bucket:
        # Empty and recreate the bucket; S3 requires a bucket to be empty
        # before it can be deleted.
        LOG(INFO, 'listing contents of bucket...')
        all_keys = [key.name for key in bucket.list()]
        LOG(INFO, 'deleting contents of bucket...')
        bucket.delete_keys(all_keys)
        s3.delete_bucket(bucket_name)
        bucket = s3.create_bucket(bucket_name)
        bucket.set_acl('public-read')

    reader = py_pert.StringTableReader()
    CHECK(reader.Open(images_uri))
    progress = iwutil.MakeProgressBar(reader.Entries())

    # Fan the resize-and-upload work out to a bounded pool of worker threads.
    num_workers = 200
    max_queue_size = 200
    job_queue = JobQueue(num_workers, max_queue_size)
    for i, (key, value) in enumerate(reader):
        image_id = py_base.KeyToUint64(key)
        jpeg_image = iw_pb2.JpegImage()
        jpeg_image.ParseFromString(value)
        job_queue.AddJob(
            ResizeAndUploadImageJob(bucket, sizes, image_id, jpeg_image.data))
        progress.update(i)

    job_queue.WaitForJobsDone()

    return
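JobQueue and ResizeAndUploadImageJob are defined elsewhere in this project. As a rough illustration of what such a job might do, here is a hypothetical sketch using PIL and the boto 2 Key API; the class name, Run() entry point, key layout, and area-based scaling are assumptions, not the project's actual implementation:

import math
from cStringIO import StringIO

from PIL import Image
from boto.s3.key import Key


class ResizeAndUploadImageJobSketch(object):
    # Hypothetical stand-in for the project's ResizeAndUploadImageJob.
    def __init__(self, bucket, sizes, image_id, jpeg_data):
        self.bucket = bucket
        self.sizes = sizes        # name -> target pixel area
        self.image_id = image_id
        self.jpeg_data = jpeg_data

    def Run(self):
        # Normalize the mode so the JPEG save below always works.
        image = Image.open(StringIO(self.jpeg_data)).convert('RGB')
        width, height = image.size
        for name, target_area in self.sizes.iteritems():
            # Scale so the output has roughly target_area pixels.
            scale = math.sqrt(float(target_area) / (width * height))
            resized = image.resize((max(1, int(width * scale)),
                                    max(1, int(height * scale))))
            buf = StringIO()
            resized.save(buf, 'JPEG')
            key = Key(self.bucket)
            key.key = '%s/%d.jpg' % (name, self.image_id)  # assumed key layout
            key.set_contents_from_string(buf.getvalue(),
                                         headers={'Content-Type': 'image/jpeg'},
                                         policy='public-read')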
Example #3
def __init__(self, s3_bucket_name):
    self.s3_bucket_name = s3_bucket_name
    self.s3 = boto.connect_s3()
    # Create the bucket (or fetch it, if it already exists) with a
    # public-read ACL.
    self.bucket = self.s3.create_bucket(self.s3_bucket_name,
                                        policy='public-read')
    return
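A hypothetical usage sketch, assuming this __init__ belongs to an uploader class (the ImageBucket class name and the upload step are illustrative only; the Key calls are standard boto 2):

from boto.s3.key import Key

uploader = ImageBucket('my-image-cache')  # hypothetical class name
key = Key(uploader.bucket)
key.key = 'thumbnails/12345.jpg'
key.set_contents_from_string(open('12345.jpg', 'rb').read(),
                             headers={'Content-Type': 'image/jpeg'},
                             policy='public-read')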