Example No. 1
def upload(request):
    if request.method == 'GET':
        fileUrl = request.session.get('fileUrl', '')
        publicRead = request.session.get('publicRead', '')
        if not fileUrl:
            # Nothing has been uploaded yet.
            return render(request, 'visits/upload.html')
        if publicRead:
            # Public objects are served straight from the bucket URL.
            return render(request, 'visits/upload.html', {'fileUrl': GS_URL + fileUrl})
        # Not publicRead: private objects need a signed, expiring URL.
        conn = boto.connect_gs(gs_access_key_id=GS_ACCESS_KEY,
                               gs_secret_access_key=GS_SECRET_KEY)
        bucket = conn.get_bucket(GS_BUCKET_NAME)
        fpic = boto.gs.key.Key(bucket)
        fpic.key = fileUrl
        fileUrl = fpic.generate_url(expires_in=86400)
        return render(request, 'visits/upload.html', {'fileUrl': fileUrl})

    # POST: store the upload, optionally with a public-read ACL.
    uploadFile = request.FILES.get('uploadFile', '')
    publicRead = request.POST.get('publicRead', False)
    conn = boto.connect_gs(gs_access_key_id=GS_ACCESS_KEY,
                           gs_secret_access_key=GS_SECRET_KEY)
    bucket = conn.get_bucket(GS_BUCKET_NAME)
    fpic = boto.gs.key.Key(bucket)
    fpic.key = 'test/' + uploadFile.name
    if publicRead:
        fpic.set_contents_from_file(uploadFile, policy='public-read')
    else:
        # Private uploads still need their contents written.
        fpic.set_contents_from_file(uploadFile)
    request.session['fileUrl'] = fpic.key
    request.session['publicRead'] = publicRead
    return redirect(reverse('visits:upload'))
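The view above omits its imports and settings. A minimal sketch of what it presumably relies on; every name and value below is an assumption, not shown in the source:

# Assumed module-level setup for the view above (placeholders throughout).
import boto
import boto.gs.key
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse  # Django 1.x-era import, matching boto

GS_ACCESS_KEY = 'GOOG...'       # interoperability access key (placeholder)
GS_SECRET_KEY = '...'           # interoperability secret (placeholder)
GS_BUCKET_NAME = 'my-bucket'    # placeholder
GS_URL = 'https://storage.googleapis.com/my-bucket/'  # public URL prefix (placeholder)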
Example No. 2
 def testIsValidBucketValid(self):
     """Tests the bucket vaildation flow."""
     if boto is None:
         logging.info('skip test since boto module not installed')
         return
     conn = self.mox.CreateMockAnything()
     self.mox.StubOutWithMock(boto, 'connect_gs')
     boto.connect_gs('key', 'secret').AndReturn(conn)
     conn.lookup('bucket').AndReturn('bucket')
     conn.close()
     self.mox.ReplayAll()
     valid, details = moblab_rpc_interface._is_valid_bucket(
         'key', 'secret', 'bucket')
     self.assertTrue(valid)
     self.mox.VerifyAll()
Example No. 3
    def delete_file(self, entry, path):
        # Move any bucket subdirectories to the filename
        bucket_name, filename = move_bucket_subdirs_to_path(entry.bucket, path)

        secret = aes_decrypt(entry.secret)
        conn = boto.connect_gs(entry.access_key, secret)
        try:
            bucket = conn.get_bucket(bucket_name)
        except boto.exception.GSResponseError as ex:
            # Can happen when the secret is invalid.
            raise IOError(
                "Failed to delete '%s' on Google Cloud Storage "
                "bucket '%s': %s" % (filename, bucket_name, str(ex)))

        # boto's GCS support reuses the S3 Key class, so an S3 key works here.
        s3key = boto.s3.key.Key(bucket)
        s3key.key = filename

        try:
            s3key.delete()
        except boto.exception.BotoServerError as ex:
            if ex.status != 404:
                raise IOError(
                    "Failed to delete '%s' from Google Cloud Storage "
                    "bucket '%s': %s" % (filename, bucket_name, str(ex)))
        return {'status': 'OK'}
Example No. 4
 def __init__(self,
              gs_access_key_id=None,
              gs_secret_access_key=None,
              **kwargs):
     conn = boto.connect_gs(gs_access_key_id, gs_secret_access_key,
                            **kwargs)
     super(GSOFS, self).__init__(conn)
Example No. 5
 def testIsValidBotoKeyInvalid(self):
     """Tests the boto key validation with invalid key."""
     if boto is None:
         logging.info('skip test since boto module not installed')
         return
     conn = self.mox.CreateMockAnything()
     self.mox.StubOutWithMock(boto, 'connect_gs')
     boto.connect_gs('key', 'secret').AndReturn(conn)
     conn.get_all_buckets().AndRaise(
         boto.exception.GSResponseError('bad', 'reason'))
     conn.close()
     self.mox.ReplayAll()
     valid, details = moblab_rpc_interface._is_valid_boto_key(
         'key', 'secret')
     self.assertFalse(valid)
     self.assertEquals('The boto access key is not valid', details)
     self.mox.VerifyAll()
Example No. 6
 def __init__(self, server_name, bucket_name, gs_access_key_id=None, gs_secret_access_key=None, is_interoperability_mode=True):
     super(GoogleCloudStorage_Boto, self).__init__(server_name)
     if is_interoperability_mode:
         self.gs_connection = boto.connect_gs(gs_access_key_id, gs_secret_access_key)
         self.bucket = self.gs_connection.get_bucket(bucket_name)
     else:
         gcs_oauth2_boto_plugin.oauth2_helper.SetFallbackClientIdAndSecret(gs_access_key_id, gs_secret_access_key)
         self.gs_storage_uri = boto.storage_uri(bucket_name, 'gs')
         self.bucket = self.gs_storage_uri.get_bucket(bucket_name)
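For reference, a hypothetical instantiation of the class above in interoperability mode; every value is a placeholder:

# Hypothetical usage; all values are placeholders.
storage = GoogleCloudStorage_Boto(
    server_name='storage.googleapis.com',
    bucket_name='my-bucket',
    gs_access_key_id='GOOG...',
    gs_secret_access_key='...',
    is_interoperability_mode=True)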
Example No. 7
 def WriteState(self):
     state = {'status': 'WAITING', 'update_time': time.time()}
     conn = boto.connect_gs(config.gs_access_key, config.gs_secret_key)
     bucket = conn.get_bucket(self._bucket)
     k = Key(bucket,
             '%s/%s/output/stitch.state' % (self._user, self._batch))
     k.set_contents_from_string(simplejson.dumps(state, indent=2),
                                policy='public-read',
                                headers={'Content-Type': 'text/plain'})
Example No. 8
 def __init__(self, task_doc, output_file):
   self.task_doc = task_doc
   self.output_file = output_file
   self.input_files = {}
   self.input_thumbs = {}
   self.request = None
   self.base = None
   self.conn = boto.connect_gs(config.gs_access_key, config.gs_secret_key)
   self.bucket = self.conn.get_bucket(config.gs_bucket)
Example No. 9
 def __init__(self, task_doc, output_file):
     self.task_doc = task_doc
     self.output_file = output_file
     self.input_files = {}
     self.input_thumbs = {}
     self.request = None
     self.base = None
     self.conn = boto.connect_gs(config.gs_access_key, config.gs_secret_key)
     self.bucket = self.conn.get_bucket(config.gs_bucket)
Example No. 10
 def prelaunch(self):
   super(BaseGoogleCloudStorageDeployment, self).prelaunch()
   logging.info('Connecting to GCS...')
   connection = boto.connect_gs(self.access_key, self.secret, is_secure=False)
   self.bucket = connection.get_bucket(self.bucket_name)
   logging.info('Connected! Configuring bucket: {}'.format(self.bucket_name))
   if self.dry_run:
     return
   self.bucket.set_acl('public-read')
   self.bucket.configure_versioning(False)
   self.bucket.configure_website(main_page_suffix='index.html', error_key='404.html')
Example No. 11
 def WriteState(self):
   state = {
     'status': 'WAITING',
     'update_time': time.time()
   }
   conn = boto.connect_gs(config.gs_access_key, config.gs_secret_key)
   bucket = conn.get_bucket(self._bucket)
   k = Key(bucket, '%s/%s/output/stitch.state' % (self._user, self._batch))
   k.set_contents_from_string(
       simplejson.dumps(state, indent=2),
       policy='public-read', headers={'Content-Type': 'text/plain'})
Example No. 12
 def prelaunch(self, dry_run=False):
   logging.info('Connecting to GCS...')
   connection = boto.connect_gs(self.access_key, self.secret, is_secure=False)
   self.bucket = connection.get_bucket(self.bucket_name)
   logging.info('Connected!')
   if dry_run:
     return
   logging.info('Configuring bucket: {}'.format(self.bucket_name))
   self.bucket.set_acl('public-read')
   self.bucket.configure_versioning(False)
   self.bucket.configure_website(main_page_suffix='index.html', error_key='404.html')
Example No. 13
  def deploy(self, pod, dry_run=False):
    source_connection = boto.connect_gs(self.source_keys[0], self.source_keys[1])
    source_bucket = source_connection.get_bucket(config.BUCKET)

    dest_connection = boto.connect_gs(self.dest_keys[0], self.dest_keys[1])
    dest_bucket = dest_connection.get_bucket(self.bucket_name)

    paths_to_content = pod.dump()
    deployed_index = google_cloud_storage.GoogleCloudStorageDeployment.get_deployed_index(dest_bucket)

    canary_index = index.Index()
    canary_index.update(paths_to_content)
    diffs = canary_index.diff(deployed_index)

    root = os.path.abspath(
        os.path.join(pod.root, '..', 'builds', datetime.datetime.now().strftime('%Y-%m-%d.%H%M%S')))

    if not dry_run:
      dest_bucket.configure_versioning(False)
      dest_bucket.configure_website(main_page_suffix='index.html', error_key='404.html')
      dest_bucket.set_acl('public-read')

      index.Index.apply_diffs(
          diffs, paths_to_content,
          write_func=lambda *args: self._write_file(
              *args, pod=pod, root=root, source_bucket=source_bucket,
              dest_bucket=dest_bucket),
          delete_func=lambda *args: self.delete_file(
              *args, source_bucket=source_bucket, dest_bucket=dest_bucket),
      )
      self._write_file(
          index.Index.BASENAME,
          canary_index.to_yaml(),
          pod=pod,
          root=root,
          source_bucket=source_bucket,
          dest_bucket=dest_bucket,
          policy='private')
      logging.info('Wrote index: /{}'.format(index.Index.BASENAME))

    return diffs
Example No. 14
def main():
    module = AnsibleModule(argument_spec=dict(
        bucket=dict(required=True),
        object=dict(default=None),
        src=dict(default=None),
        dest=dict(default=None),
        expiration=dict(default=600, aliases=['expiry']),
        mode=dict(
            choices=['get', 'put', 'delete', 'create', 'get_url', 'get_str'],
            required=True),
        permission=dict(
            choices=['private', 'public-read', 'authenticated-read'],
            default='private'),
        headers=dict(type='dict', default={}),
        gs_secret_key=dict(no_log=True, required=True),
        gs_access_key=dict(required=True),
        overwrite=dict(default=True, type='bool', aliases=['force']),
    ), )

    if not HAS_BOTO:
        module.fail_json(msg='boto 2.9+ required for this module')

    bucket = module.params.get('bucket')
    obj = module.params.get('object')
    src = module.params.get('src')
    dest = module.params.get('dest')
    if dest:
        dest = os.path.expanduser(dest)
    mode = module.params.get('mode')
    expiry = module.params.get('expiration')
    gs_secret_key = module.params.get('gs_secret_key')
    gs_access_key = module.params.get('gs_access_key')
    overwrite = module.params.get('overwrite')

    if mode == 'put':
        if not src or not obj:
            module.fail_json(
                msg="When using PUT, src, bucket, object are mandatory parameters")
    if mode == 'get':
        if not dest or not obj:
            module.fail_json(
                msg="When using GET, dest, bucket, object are mandatory parameters")
    if obj:
        obj = os.path.expanduser(module.params['object'])

    try:
        gs = boto.connect_gs(gs_access_key, gs_secret_key)
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))
Example No. 15
 def bucket(self):
   if self.use_interoperable_auth:
     gs_connection = boto.connect_gs(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     # Always use our internal cacerts.txt file. This fixes an issue with the
     # PyInstaller-based frozen distribution, while allowing us to continue to
     # verify certificates and use a secure connection.
     gs_connection.ca_certificates_file = _certs_path
   else:
     gs_connection = storage.get_connection(
         self.config.project, self.config.email, self.config.key_path)
   return gs_connection.get_bucket(self.config.bucket)
Example No. 16
def upload(request):
    if request.method == 'POST':
        fileToUpload = request.FILES['fileToUpload']
        folderName = request.POST.get('folderName', '')
        fileName = folderName + '/' + fileToUpload.name
        conn = boto.connect_gs(gs_access_key_id=GS_ACCESS_KEY,
                               gs_secret_access_key=GS_SECRET_KEY)
        bucket = conn.get_bucket(GS_BUCKET_NAME)
        k = boto.gs.key.Key(bucket)
        k.key = fileName
        k.set_contents_from_file(fileToUpload, policy='public-read')
        return HttpResponse(GS_URL + fileName)
    return HttpResponse('upload fail')
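A hypothetical client-side check of this view; the URL, port, and file name are placeholders:

# Hypothetical smoke test; the field names follow the view above.
import requests

resp = requests.post('http://localhost:8000/upload/',
                     data={'folderName': 'test'},
                     files={'fileToUpload': open('photo.jpg', 'rb')})
print(resp.text)  # the GS_URL-based public link on success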
Example No. 17
 def bucket(self):
   if self.use_interoperable_auth:
     gs_connection = boto.connect_gs(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     # Always use our internal cacerts.txt file. This fixes an issue with the
     # PyInstaller-based frozen distribution, while allowing us to continue to
     # verify certificates and use a secure connection.
     gs_connection.ca_certificates_file = utils.get_cacerts_path()
   else:
     gs_connection = storage.get_connection(
         self.config.project, self.config.email, self.config.key_path)
   return gs_connection.get_bucket(self.config.bucket)
Example No. 18
    def __init__(self, client_id, client_secret, bucket_name, **kwargs):
        """
        Init adapter
        :param client_id: str
        :param client_secret: str
        :param bucket_name: str
        :param kwargs: dict
        """
        self.gs_conn = boto.connect_gs(client_id, client_secret)
        self.bucket = self.gs_conn.get_bucket(bucket_name)

        self.cache_folder = kwargs.get('cache_folder', 'cache').strip('/')
        self.domain = kwargs.get('domain', '%s.storage.googleapis.com' % bucket_name)
        self.schema = kwargs.get('schema', 'https')
Example No. 19
def fileUpload(request):
    conn = boto.connect_gs(gs_access_key_id=GS_ACCESS_KEY,
                           gs_secret_access_key=GS_SECRET_KEY)
    bucket = conn.get_bucket(GS_BUCKET_NAME)
    fileToUpload = request.FILES['file']
    cloudFileName = 'test/recordAppy/' + fileToUpload.name
    fpic = boto.gs.key.Key(bucket)
    fpic.key = cloudFileName
    fpic.set_contents_from_file(fileToUpload)

    url = fpic.generate_url(expires_in=86400)

    # The value is passed back via JSON in the caller's catch handler; JSON
    # cannot carry bytes, so the URL is returned as a str.
    return url
Example No. 20
    def __init__(self, client_id, client_secret, bucket_name, **kwargs):
        """
        Init adapter
        :param client_id: str
        :param client_secret: str
        :param bucket_name: str
        :param kwargs: dict
        """
        self.gs_conn = boto.connect_gs(client_id, client_secret)
        self.bucket = self.gs_conn.get_bucket(bucket_name)

        self.cache_folder = kwargs.get('cache_folder', 'cache').strip('/')
        self.domain = kwargs.get('domain',
                                 '%s.storage.googleapis.com' % bucket_name)
        self.schema = kwargs.get('schema', 'https')
Example No. 21
 def AddFile(self, filename, contents):
   part = 0
   bytes_left = len(contents)
   parts = []
   conn = boto.connect_gs(config.gs_access_key, config.gs_secret_key)
   bucket = conn.get_bucket(self._bucket)
   while bytes_left > 0:
     fname = '%s.%d' % (filename, part)
     parts.append(fname)
     offset = part * _CHUNK_SIZE
     k = Key(bucket, '%s/%s/input/%s' % (self._user, self._batch, fname))
     k.set_contents_from_string(
         contents[offset:offset+_CHUNK_SIZE])
     part += 1
     bytes_left -= _CHUNK_SIZE
   self._files.append({'name': filename, 'chunks': parts})
Example No. 22
 def AddFile(self, filename, contents):
     part = 0
     bytes_left = len(contents)
     parts = []
     conn = boto.connect_gs(config.gs_access_key, config.gs_secret_key)
     bucket = conn.get_bucket(self._bucket)
     while bytes_left > 0:
         fname = '%s.%d' % (filename, part)
         parts.append(fname)
         offset = part * _CHUNK_SIZE
         k = Key(bucket,
                 '%s/%s/input/%s' % (self._user, self._batch, fname))
         k.set_contents_from_string(contents[offset:offset + _CHUNK_SIZE])
         part += 1
         bytes_left -= _CHUNK_SIZE
     self._files.append({'name': filename, 'chunks': parts})
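As a counterpart to AddFile, a sketch (assumed, not part of the source) that reads such a chunked file back by concatenating its numbered parts:

# Assumed counterpart to AddFile; `Key` is boto's key class and
# `file_entry` is one element of the `self._files` list built above.
def read_file(bucket, user, batch, file_entry):
    parts = []
    for fname in file_entry['chunks']:
        k = Key(bucket, '%s/%s/input/%s' % (user, batch, fname))
        parts.append(k.get_contents_as_string())
    return ''.join(parts)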
Example No. 23
 def bucket(self):
     if self.config.oauth2:
         enable_oauth2_auth_handler()
     gs_connection = boto.connect_gs(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     # Always use our internal cacerts.txt file. This fixes an issue with the
     # PyInstaller-based frozen distribution, while allowing us to continue to
     # verify certificates and use a secure connection.
     gs_connection.ca_certificates_file = utils.get_cacerts_path()
     try:
         return gs_connection.get_bucket(self.config.bucket)
     except boto.exception.GSResponseError as e:
         if e.status == 404:
             logging.info('Creating bucket: {}'.format(self.config.bucket))
             return gs_connection.create_bucket(self.config.bucket)
         raise
Example No. 24
 def bucket(self):
     if self.config.oauth2:
         enable_oauth2_auth_handler()
     gs_connection = boto.connect_gs(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     # Always use our internal cacerts.txt file. This fixes an issue with the
     # PyInstaller-based frozen distribution, while allowing us to continue to
     # verify certificates and use a secure connection.
     gs_connection.ca_certificates_file = utils.get_cacerts_path()
     try:
         return gs_connection.get_bucket(self.config.bucket)
     except boto.exception.GSResponseError as e:
         if e.status == 404:
             logging.info('Creating bucket: {}'.format(self.config.bucket))
             return gs_connection.create_bucket(self.config.bucket)
         raise
Example No. 25
def upload_image(img=None, encoded_image=True, filename=None, public=True,
                 cache_control='max-age=2000000',
                 content_type='image/jpeg'):
    """
    Uploads a base64 encoded image to a Google Cloud Storage bucket.
    :param img: data for image
    :param encoded_image: if base64 encoded image
    :param filename: storage filename (a random one is generated if omitted)
    :param public: boolean if public
    :param cache_control: http cache-control value
    :param content_type: http content type
    :return: the uploaded key
    """
    # Generate the default filename per call; a default argument expression
    # would be evaluated only once, at import time.
    if filename is None:
        filename = 'images/' + str(uuid.uuid4()) + '.JPG'

    # in-memory file
    f = cStringIO.StringIO()

    # manipulate with pillow
    if encoded_image:
        img = base64.b64decode(img)
        img = Image.open(cStringIO.StringIO(img))
    else:
        img = Image.open(img)

    img = img.convert("RGB")  # convert() returns a new image
    img.thumbnail((1200, 1200))
    img.save(f, 'JPEG', quality=75)

    # Connect to google storage
    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])

    # Get bucket
    bucket = gs.get_bucket(current_app.config['BUCKET'])

    # create file
    gs_file = Key(bucket)
    gs_file.key = filename
    gs_file.set_metadata('cache-control', cache_control)
    gs_file.set_metadata('content-type', content_type)
    gs_file.set_contents_from_string(f.getvalue())

    if public:
        gs_file.make_public()

    return gs_file
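A hypothetical call site for upload_image inside a Flask request context; b64_data is a placeholder, the BUCKET config key follows the excerpt:

# Hypothetical usage; b64_data stands in for base64-encoded JPEG bytes.
key = upload_image(img=b64_data, encoded_image=True, public=True)
url = 'https://storage.googleapis.com/%s/%s' % (current_app.config['BUCKET'], key.key)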
Example No. 26
def main():
    module = AnsibleModule(
        argument_spec = dict(
            bucket         = dict(required=True),
            object         = dict(default=None),
            src            = dict(default=None),
            dest           = dict(default=None),
            expiration     = dict(default=600, aliases=['expiry']),
            mode           = dict(choices=['get', 'put', 'delete', 'create', 'get_url', 'get_str'], required=True),
            permission     = dict(choices=['private', 'public-read', 'authenticated-read'], default='private'),
            headers        = dict(type='dict', default={}),
            gs_secret_key  = dict(no_log=True, required=True),
            gs_access_key  = dict(required=True),
            overwrite      = dict(default=True, type='bool', aliases=['force']),
        ),
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto 2.9+ required for this module')

    bucket        = module.params.get('bucket')
    obj           = module.params.get('object')
    src           = module.params.get('src')
    dest          = module.params.get('dest')
    if dest:
        dest      = os.path.expanduser(dest)
    mode          = module.params.get('mode')
    expiry        = module.params.get('expiration')
    gs_secret_key = module.params.get('gs_secret_key')
    gs_access_key = module.params.get('gs_access_key')
    overwrite     = module.params.get('overwrite')

    if mode == 'put':
        if not src or not obj:
            module.fail_json(msg="When using PUT, src, bucket, object are mandatory parameters")
    if mode == 'get':
        if not dest or not obj:
            module.fail_json(msg="When using GET, dest, bucket, object are mandatory parameters")
    if obj:
        obj = os.path.expanduser(module.params['object'])

    try:
        gs = boto.connect_gs(gs_access_key, gs_secret_key)
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))
Example No. 27
def delete_image(key):
    """
    Deletes a key (file) from the Google Cloud Storage bucket.
    :param key: name of the key to delete
    :return: None
    """
    # Connect to Google Cloud Storage
    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])

    # Get bucket
    bucket = gs.get_bucket(current_app.config['AWS_S3_BUCKET'])

    # delete_key accepts the key name directly; no Key object is needed
    bucket.delete_key(key)
Example No. 28
File: ccp.py Project: tly1980/cscp
    def __init__(self, config, srcfiles, dstFolder, tmpFolder='/tmp/'):
        self.buckets = {}
        self.gs_config = config['gs']
        self.gs_conn = boto.connect_gs(
            self.gs_config['api_key'], self.gs_config['api_secret'])

        self.s3_config = config['s3']
        self.s3_conn = boto.connect_s3(
            self.s3_config['api_key'], self.s3_config['api_secret'])

        self.srcfiles = srcfiles
        self.dstFolder = dstFolder

        self.success_list = []
        self.failed_list = []
        self.tmpFolder = tmpFolder
        self.logger = logging.getLogger(CopyMachine.__name__)
Example No. 29
 def reconnect(self):
     self.conn = boto.connect_gs(self.access_key_id, self.secret_access_key)
     buckets = [b.name for b in self.conn.get_all_buckets()]
     if self.bucket_name not in buckets:
         try:
             self.conn.create_bucket(self.bucket_name)
             self.logger.info('Successfully created bucket "%s"' % self.bucket_name)
         except boto.exception.S3CreateError as e:
             uuid = "cloudfusion_" + get_uuid()
             msg = "Failed to create bucket %s; you can try another bucket name, for instance %s" % (self.bucket_name, uuid)
             if len(buckets) > 0:
                 msg += "\nor an already existing bucket: %s" % buckets
             self.logger.error('Failed to create bucket: ' + repr(e))
             self.logger.debug(msg)
             print msg
             sys.exit()
         except boto.exception.StorageCreateError as e:
             self.logger.error('Failed to create bucket: ' + repr(e))
             sys.exit()
Example No. 30
def upload_file_to_s3(contents, key, content_type, do_gzip=True, max_age=300, public=True):
    """ Puts a file in s3
    :param contents: must be string
    :param key: string filename to use
    :param content_type:
    :param do_gzip: boolean
    :param max_age: int for cache max age
    :param public: boolean
    :return:
    """

    # fake a file for gzip
    out = StringIO.StringIO()

    if do_gzip:
        with gzip.GzipFile(fileobj=out, mode="w") as outfile:
            outfile.write(contents)
    else:
        out.write(contents)

    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])

    # Get bucket
    bucket = gs.get_bucket(current_app.config['BUCKET'])

    # Create key
    k = Key(bucket)
    k.key = key

    # metadata
    k.set_metadata('content-type', content_type)
    k.set_metadata('cache-control', 'max-age=%d' % max_age)
    if do_gzip:
        # only advertise gzip when the payload was actually compressed
        k.set_metadata('content-encoding', 'gzip')

    # upload file
    k.set_contents_from_string(out.getvalue())

    if public:
        k.make_public()
Example No. 31
def get_street_view_coverage(x, y, z=21):
    url = "http://mt1.googleapis.com/vt?hl=en-US&lyrs=svv|cb_client:apiv3&style=40,18&gl=US&x=%d&y=%d&z=%d" % (
        x, y, z)
    response = requests.get(url)
    f = StringIO.StringIO(response.content)
    img = Img.open(f)

    # save the tile image to Google Cloud Storage
    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])

    # Get bucket
    bucket = gs.get_bucket(current_app.config['BUCKET'])

    cache_control = 'max-age=200'
    content_type = 'image/png'

    s3_file = Key(bucket)
    s3_file.key = 'temp/google_street_view_tiles/%d/%d/%d.PNG' % (z, x, y)
    s3_file.set_metadata('cache-control', cache_control)
    s3_file.set_metadata('content-type', content_type)
    s3_file.set_contents_from_string(f.getvalue())
    s3_file.make_public()
Example No. 32
 def __init__(self, bucket_name):
     self.conn = boto.connect_gs()
     self.bucket = self._bucket(bucket_name)
Example No. 33
from models import User

from google.appengine.ext import ndb
from google.appengine.api import app_identity
from google.appengine.api import images
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers

import os
import shutil
import StringIO
import tempfile
import time

# connect_gs is presumably imported earlier, e.g. `from boto import connect_gs`.
# Credentials are redacted here; never hardcode secrets in source.
gs_conn = connect_gs(
    gs_access_key_id='GOOG...REDACTED',
    gs_secret_access_key='REDACTED')
GOOGLE_STORAGE = 'gs'


@endpoints.api(name="user",
               version="v1",
               description="This should be for movie quotes but it says User")
class UserApi(protorpc.remote.Service):
    """This is the api for the movie Quotes"""

    @User.method(name="user.create", path="user/create", http_method="POST")
    def user_create(self, request):
        """create users as well as update the data"""
        if request.from_datastore:
Example No. 34
 def _get_connection(self):
     """Return native connection object."""
     return boto.connect_gs(self.account, self.secret_key)
Example No. 35
def get_image(lat,
              lon,
              zoom,
              location_id=None,
              layer="DigitalGlobe:ImageryTileService",
              profile="MyDG_Color_Consumer_Profile",
              training_only=False):
    """ Gets a tile and saves it to s3 while also saving the important acquisition date to the db.
    :param lat:
    :param lon:
    :param zoom:
    :param location_id:
    :param layer:
    :return:
    """
    # convert lat lon to tile
    x, y = degree_to_tile_number(lat, lon, zoom)

    # build url
    url = _build_dg_url(x,
                        y,
                        zoom,
                        current_app.config['DG_EV_CONNECT_ID'],
                        profile=profile)

    # get tile
    auth = current_app.config['DG_EV_USERNAME'], current_app.config[
        'DG_EV_PASSWORD']
    connect_id = current_app.config['DG_EV_CONNECT_ID']  # avoid shadowing the builtin id

    m, n = 5, 5
    mosaic = Img.new('RGB', (256 * m, 256 * n))

    tile_matrix = [[None for i in range(m)] for j in range(n)]

    def download(args):
        i, j = args
        img_url = _build_dg_url(x + i - m / 2,
                                y + j - n / 2,
                                zoom,
                                connect_id,
                                profile=profile)
        r = requests.get(img_url, auth=auth)

        if r.status_code != 200 or int(r.headers['content-length']) < 1000:
            return False

        f = StringIO.StringIO(r.content)
        tile = Img.open(f)

        mosaic.paste(tile, (i * 256, j * 256))
        tile_matrix[i][j] = {'tile': tile, 'data': get_image_data(tile)}
        return True

    pool = ThreadPool(m * n)
    results = pool.map(download, [(i, j) for i, row in enumerate(tile_matrix)
                                  for j, col in enumerate(row)])
    pool.close()
    pool.join()

    if sum(results) < m * n:
        print('some tiles failed to download')
        return

    data = tile_matrix[int(len(tile_matrix) / 2)][int(len(tile_matrix[0]) /
                                                      2)]['data']
    # adjust image data for all other tiles in mosaic
    data['resolution'] = max([
        max([col['data']['resolution'] for col in row]) for row in tile_matrix
    ])
    data['date_acquired_earliest'] = min([
        min([col['data']['date_acquired_earliest'] for col in row])
        for row in tile_matrix
    ])
    # latest must take the max, mirroring the min above
    data['date_acquired_latest'] = max([
        max([col['data']['date_acquired_latest'] for col in row])
        for row in tile_matrix
    ])

    data['corner_ne_lat'] = tile_matrix[-1][0]['data']['corner_ne_lat']
    data['corner_ne_lon'] = tile_matrix[-1][0]['data']['corner_ne_lon']
    data['corner_sw_lat'] = tile_matrix[0][-1]['data']['corner_sw_lat']
    data['corner_sw_lon'] = tile_matrix[0][-1]['data']['corner_sw_lon']
    data['url'] = "images/digital_globe/%s/%s" % (profile,
                                                  str(uuid.uuid4()) + '.JPG')
    data['source'] = "VHRI"

    # quality checks
    if (data['date_acquired_latest'] -
            data['date_acquired_earliest']).days > 200:
        print('inconsistent acquisition date: %d days' %
              (data['date_acquired_latest'] -
               data['date_acquired_earliest']).days)
        return

    if data['resolution'] > 1:
        print('poor resolution: %f' % data['resolution'])
        return

    # n = 100
    # size = mosaic.size
    # white_thresh = 200
    # num_white = 0
    # for i in range(n):
    #     pixel = mosaic.getpixel((random.randrange(0,size[0]),random.randrange(0,size[1])))
    #     if sum((int(color > white_thresh) for color in pixel[:3])) >= 2:
    #         num_white += 1
    #
    # print num_white/float(n)

    data.pop('resolution', None)

    if location_id is None:
        if training_only:
            location = Location(lat=data['lat'],
                                lon=data['lon'],
                                source='random',
                                use_validation=True)
        else:
            location = Location(lat=data['lat'],
                                lon=data['lon'],
                                source='random')
        db.session.add(location)
        db.session.flush()
        location_id = location.id

    data['location_id'] = location_id

    # mosaic.show()

    out = StringIO.StringIO()
    mosaic.save(out, format='JPEG', optimize=True, quality=30)

    image = Image(**data)
    db.session.add(image)

    # save image to Google Cloud Storage
    gs = boto.connect_gs(current_app.config['GS_ACCESS_KEY'],
                         current_app.config['GS_SECRET'])

    # Get bucket
    bucket = gs.get_bucket(current_app.config['BUCKET'])

    cache_control = 'max-age=2000000'
    content_type = 'image/jpeg'

    s3_file = Key(bucket)
    s3_file.key = data['url']
    s3_file.set_metadata('cache-control', cache_control)
    s3_file.set_metadata('content-type', content_type)
    s3_file.set_contents_from_string(out.getvalue())
    s3_file.make_public()

    # save information to database
    db.session.commit()
Example No. 36
 def __init__(self, key, secret, bucket_name):
     self.access_key = key
     self.secret = secret
     self.conn = boto.connect_gs(key, secret)
     # boto's GS support reuses the S3 Bucket class.
     self.bucket = boto.s3.bucket.Bucket(self.conn, bucket_name)
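Building the bucket through the connection is the more direct equivalent; a sketch, with the caveat that get_bucket issues a validating request while the Bucket constructor above defers any network call:

# Equivalent sketch: connect_gs returns a GSConnection, and get_bucket
# returns the bucket object directly (raising if the bucket is missing).
conn = boto.connect_gs(key, secret)
bucket = conn.get_bucket(bucket_name)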
Example No. 37
import StringIO
import string
import json
import urllib
import os
import sys

import boto
from boto.gs.connection import GSConnection
from boto.s3.key import Key
import constants

# make connection to Google
conn = boto.connect_gs(constants.GS_ACCESS_KEY, constants.GS_SECRET_KEY)

in_message = sys.argv[1]

message = json.loads(urllib.unquote(in_message))
primary = message['primary']
image = message['image']

bucket = conn.get_bucket(primary)
fpic = Key(bucket)
fpic.key = image
fpic.get_contents_to_filename(constants.TMP_DIR + image)
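The script expects its message as a URL-encoded JSON argument; a hypothetical invocation (the script name is assumed):

# Building the argument the script above expects; the script name is assumed.
import json
import urllib
msg = urllib.quote(json.dumps({'primary': 'my-bucket', 'image': 'photo.jpg'}))
# then run: python fetch_image.py "<msg>"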
Example No. 38
 def __init__(self, gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
     conn = boto.connect_gs(gs_access_key_id, gs_secret_access_key, **kwargs)
     super(GSOFS, self).__init__(conn)
Example No. 39
import os
import sys

import boto

conn = boto.connect_gs()


def Usage():
    print "python download_gs_dir.py <bucket name> <google storage path> <save dir>\n\nExample: python download_gs_dir.py percolata logdump/ /tmp/save/"
    sys.exit()


try:
    script, bucket_name, gs_dir, save_dir = sys.argv
except ValueError:
    # wrong number of command-line arguments
    Usage()

bucket = conn.get_bucket(bucket_name)
print bucket
for key in bucket.list(gs_dir):
    print key, key.name
    save_path = os.path.join(save_dir, key.name)
    file_dir = os.path.split(save_path)[0]
    if not os.path.isdir(file_dir):
        os.makedirs(file_dir)
    try:
        res = key.get_contents_to_filename(save_path)
        print "download ok: %s" % key.name
    except Exception as e:
        print e
        print key.name + ": FAILED"
Example No. 40
def main():
    module = AnsibleModule(argument_spec=dict(
        bucket=dict(required=True),
        object=dict(default=None, type='path'),
        src=dict(default=None),
        dest=dict(default=None, type='path'),
        expiration=dict(type='int', default=600, aliases=['expiry']),
        mode=dict(
            choices=['get', 'put', 'delete', 'create', 'get_url', 'get_str'],
            required=True),
        permission=dict(
            choices=['private', 'public-read', 'authenticated-read'],
            default='private'),
        headers=dict(type='dict', default={}),
        gs_secret_key=dict(no_log=True, required=True),
        gs_access_key=dict(required=True),
        overwrite=dict(default=True, type='bool', aliases=['force']),
        region=dict(default='US', type='str'),
        versioning=dict(default='no', type='bool')), )

    if not HAS_BOTO:
        module.fail_json(
            msg='`boto` 2.9+ is required for this module. Try: pip install `boto` --upgrade')

    bucket = module.params.get('bucket')
    obj = module.params.get('object')
    src = module.params.get('src')
    dest = module.params.get('dest')
    mode = module.params.get('mode')
    expiry = module.params.get('expiration')
    gs_secret_key = module.params.get('gs_secret_key')
    gs_access_key = module.params.get('gs_access_key')
    overwrite = module.params.get('overwrite')

    if mode == 'put':
        if not src or not obj:
            module.fail_json(
                msg="When using PUT, src, bucket, object are mandatory parameters")
    if mode == 'get':
        if not dest or not obj:
            module.fail_json(
                msg="When using GET, dest, bucket, object are mandatory parameters")

    try:
        gs = boto.connect_gs(gs_access_key, gs_secret_key)
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))

    if mode == 'get':
        if not bucket_check(module, gs, bucket) or not key_check(
                module, gs, bucket, obj):
            module.fail_json(msg="Target bucket/key cannot be found",
                             failed=True)
        if not path_check(dest):
            download_gsfile(module, gs, bucket, obj, dest)
        else:
            handle_get(module, gs, bucket, obj, overwrite, dest)

    if mode == 'put':
        if not path_check(src):
            module.fail_json(msg="Local object for PUT does not exist",
                             failed=True)
        handle_put(module, gs, bucket, obj, overwrite, src, expiry)

    # Support for deleting an object if we have both params.
    if mode == 'delete':
        handle_delete(module, gs, bucket, obj)

    if mode == 'create':
        handle_create(module, gs, bucket, obj)

    if mode == 'get_url':
        if bucket and obj:
            if bucket_check(module, gs, bucket) and key_check(
                    module, gs, bucket, obj):
                get_download_url(module, gs, bucket, obj, expiry)
            else:
                module.fail_json(msg="Key/Bucket does not exist", failed=True)
        else:
            module.fail_json(msg="Bucket and Object parameters must be set",
                             failed=True)

    # --------------------------- Get the String contents of an Object -------------------------
    if mode == 'get_str':
        if bucket and obj:
            if bucket_check(module, gs, bucket) and key_check(
                    module, gs, bucket, obj):
                download_gsstr(module, gs, bucket, obj)
            else:
                module.fail_json(msg="Key/Bucket does not exist", failed=True)
        else:
            module.fail_json(msg="Bucket and Object parameters must be set",
                             failed=True)
Example No. 41
 def _get_connection(self):
     """Return native connection object."""
     return boto.connect_gs(self.account, self.secret_key)
Example No. 42
 def __init__(self, key, secret, bucket_name):
     self.access_key = key
     self.secret = secret
     self.conn = boto.connect_gs(key, secret)
     # boto's GS support reuses the S3 Bucket class.
     self.bucket = boto.s3.bucket.Bucket(self.conn, bucket_name)
Example No. 43
def main():
    module = AnsibleModule(
        argument_spec = dict(
            bucket         = dict(required=True),
            object         = dict(default=None, type='path'),
            src            = dict(default=None),
            dest           = dict(default=None, type='path'),
            expiration     = dict(type='int', default=600, aliases=['expiry']),
            mode           = dict(choices=['get', 'put', 'delete', 'create', 'get_url', 'get_str'], required=True),
            permission     = dict(choices=['private', 'public-read', 'authenticated-read'], default='private'),
            headers        = dict(type='dict', default={}),
            gs_secret_key  = dict(no_log=True, required=True),
            gs_access_key  = dict(required=True),
            overwrite      = dict(default=True, type='bool', aliases=['force']),
            region         = dict(default='US', type='str'),
            versioning     = dict(default='no', type='bool')
        ),
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto 2.9+ required for this module')

    bucket        = module.params.get('bucket')
    obj           = module.params.get('object')
    src           = module.params.get('src')
    dest          = module.params.get('dest')
    mode          = module.params.get('mode')
    expiry        = module.params.get('expiration')
    gs_secret_key = module.params.get('gs_secret_key')
    gs_access_key = module.params.get('gs_access_key')
    overwrite     = module.params.get('overwrite')

    if mode == 'put':
        if not src or not obj:
            module.fail_json(msg="When using PUT, src, bucket, object are mandatory parameters")
    if mode == 'get':
        if not dest or not obj:
            module.fail_json(msg="When using GET, dest, bucket, object are mandatory parameters")

    try:
        gs = boto.connect_gs(gs_access_key, gs_secret_key)
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))

    if mode == 'get':
        if not bucket_check(module, gs, bucket) or not key_check(module, gs, bucket, obj):
            module.fail_json(msg="Target bucket/key cannot be found", failed=True)
        if not path_check(dest):
            download_gsfile(module, gs, bucket, obj, dest)
        else:
            handle_get(module, gs, bucket, obj, overwrite, dest)

    if mode == 'put':
        if not path_check(src):
            module.fail_json(msg="Local object for PUT does not exist", failed=True)
        handle_put(module, gs, bucket, obj, overwrite, src, expiry)

    # Support for deleting an object if we have both params.
    if mode == 'delete':
        handle_delete(module, gs, bucket, obj)

    if mode == 'create':
        handle_create(module, gs, bucket, obj)

    if mode == 'get_url':
        if bucket and obj:
            if bucket_check(module, gs, bucket) and key_check(module, gs, bucket, obj):
                get_download_url(module, gs, bucket, obj, expiry)
            else:
                module.fail_json(msg="Key/Bucket does not exist", failed=True)
        else:
            module.fail_json(msg="Bucket and Object parameters must be set", failed=True)

    # --------------------------- Get the String contents of an Object -------------------------
    if mode == 'get_str':
        if bucket and obj:
            if bucket_check(module, gs, bucket) and key_check(module, gs, bucket, obj):
                download_gsstr(module, gs, bucket, obj)
            else:
                module.fail_json(msg="Key/Bucket does not exist", failed=True)
        else:
            module.fail_json(msg="Bucket and Object parameters must be set", failed=True)