Example #1
def upload_file_to_s3(file_path, s3_filename, content_type="text/html", extra_message=None):
    """
    Uploads a file to bokeh-travis s3 bucket.
    """
    try:
        conn = boto.connect_s3()
        with open(file_path, "rb") as f:
            contents = f.read()
        upload = True

    except NoAuthHandlerFound:
        fail("Upload was requested but could not connect to S3.")
        fail("This is expected if you are an external contributor submitting a PR to Bokeh.")
        fail("This could also happen if S3 credentials are not available on the machine where this test is running.")
        upload = False

    except OSError:
        fail("Upload was requested but file %s was not available." % file_path)
        upload = False

    if __version__.endswith("-dirty"):
        fail("Uploads are not permitted when working directory is dirty.")
        fail("Make sure that __version__ doesn't contain -dirty suffix.")
        upload = False

    if upload:
        bucket = conn.get_bucket(S3_BUCKET)
        key = S3Key(bucket, s3_filename)
        key.set_metadata("Content-Type", content_type)
        key.set_contents_from_string(contents, policy="public-read")
        url = join(S3_URL, s3_filename)
        if extra_message is not None:
            ok("%s | Access upload at: %s" % (extra_message, url))
        else:
            trace("Access upload at: %s" % url)
Example #2
def upload_file_to_s3(file_path,
                      s3_filename,
                      content_type="text/html",
                      extra_message=None):
    ''' Upload a file to the ci.bokeh.org s3 bucket

    '''
    conn = connect_to_s3()
    upload = conn is not None

    try:
        with open(file_path, "rb") as f:
            contents = f.read()
    except OSError:
        fail("Upload was requested but file %s was not available." % file_path)
        upload = False

    if __version__.endswith("-dirty"):
        fail("Uploads are not permitted when working directory is dirty.")
        fail("Make sure that __version__ doesn't contain -dirty suffix.")
        upload = False

    if upload:
        bucket = conn.get_bucket(S3_BUCKET)
        key = S3Key(bucket, s3_filename)
        key.set_metadata("Content-Type", content_type)
        key.set_contents_from_string(contents, policy="public-read")
        url = join(S3_URL, s3_filename)
        if extra_message is not None:
            ok("%s | Access upload at: %s" % (extra_message, url))
        else:
            trace("Access upload at: %s" % url)
Example #3
def download_ssh_pubkey( self, ec2_keypair ):
    try:
        bucket = self.s3.get_bucket( self.s3_bucket_name )
        s3_entry = S3Key( bucket )
        s3_entry.key = self.ssh_pubkey_s3_key_prefix + ec2_keypair.fingerprint
        ssh_pubkey = s3_entry.get_contents_as_string( )
    except S3ResponseError as e:
        if e.status == 404:
            raise UserError(
                "There is no matching SSH pub key stored in S3 for EC2 key pair %s. Has "
                "it been registered, e.g. using cgcloud's register-key command?" %
                ec2_keypair.name )
        else:
            raise
    fingerprint_len = len( ec2_keypair.fingerprint.split( ':' ) )
    if fingerprint_len == 20:  # 160-bit SHA-1
        # The fingerprint is that of a private key. We can't get at the private key so we
        # can't verify the public key either. So this is inherently insecure. However,
        # remember that the only reason why we are dealing with an EC2-generated private
        # key is that the Jenkins EC2 plugin expects a 20-byte fingerprint. See
        # https://issues.jenkins-ci.org/browse/JENKINS-20142 for details. Once that issue
        # is fixed, we can switch back to just using imported keys and 16-byte fingerprints.
        pass
    elif fingerprint_len == 16:  # 128-bit MD5
        fingerprint = ec2_keypair_fingerprint( ssh_pubkey )
        if ec2_keypair.fingerprint != fingerprint:
            raise UserError(
                "Fingerprint mismatch for key %s! Expected %s but got %s. The EC2 keypair "
                "doesn't match the public key stored in S3." %
                (ec2_keypair.name, ec2_keypair.fingerprint, fingerprint) )
    return ssh_pubkey
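The branch on fingerprint_len works because AWS renders the two digest formats with a different number of colon-separated fields: an MD5 fingerprint has 16, a SHA-1 fingerprint has 20. A quick illustration (the values are made up, not real fingerprints):

# Illustrative values only, not real fingerprints.
md5_fp = "a1:b2:c3:d4:e5:f6:a7:b8:c9:d0:e1:f2:a3:b4:c5:d6"
sha1_fp = md5_fp + ":e7:f8:a9:b0"

assert len(md5_fp.split(':')) == 16   # 128-bit MD5: imported key pair
assert len(sha1_fp.split(':')) == 20  # 160-bit SHA-1: EC2-generated key pair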
Example #4
def upload_s3(bucket, csarfile, content_type, filename):
    """Uploads a local file to S3. Returns the URL for the object uploaded.

    bucket -- the bucket object to which the file is uploaded.
    csarfile -- path of the local file to upload.
    content_type -- content type that needs to be set for the S3 object.
    filename -- key name under which the object is stored in S3.
    """
    # upload the file to the given bucket
    print(">>>>>>>>>>>>>>>> csarfile = {}".format(csarfile))
    nbbytes_file = os.path.getsize(csarfile)
    print("nbbytes_file = {}".format(nbbytes_file))
    obj = S3Key(bucket)
    obj.name = filename
    obj.content_type = content_type
    nbbytes_s3 = obj.set_contents_from_filename(csarfile)
    print("nbbytes_s3 = {}".format(nbbytes_s3))
    obj.set_acl('public-read-write')

    for key in bucket.list():
        print("{name}\t{size}".format(name=key.name, size=key.size))

    res = obj.get_contents_to_filename("/tmp/test.zip")
    print("res = {}".format(res))

    return obj.generate_url(expires_in=0, query_auth=False)
Example #5
File: snippet.py  Project: szabo92/gistable
def upload_s3(file, key_name, content_type, bucket_name):
    """Uploads a given StringIO object to S3. Closes the file after upload.

    Returns the URL for the object uploaded.

    Note: The acl for the file uploaded is set to 'public-read'.

    Keyword Arguments:
    file -- StringIO object which needs to be uploaded.
    key_name -- key name to be kept in S3.
    content_type -- content type that needs to be set for the S3 object.
    bucket_name -- name of the bucket where file needs to be uploaded.
    """
    # create connection
    conn = S3Connection(app.config['AWS_ACCESS_KEY_ID'],
                        app.config['AWS_SECRET_ACCESS_KEY'])

    # upload the file after getting the right bucket
    bucket = conn.get_bucket(bucket_name)
    obj = S3Key(bucket)
    obj.name = key_name
    obj.content_type = content_type
    obj.set_contents_from_string(file.getvalue())
    obj.set_acl('public-read')

    # close stringio object
    file.close()

    return obj.generate_url(expires_in=0, query_auth=False)
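A minimal usage sketch, assuming a Flask app whose config carries the AWS credentials as the snippet expects; the key name and bucket name here are hypothetical:

from io import StringIO

payload = StringIO(u"hello from S3")
url = upload_s3(payload, "greetings/hello.txt", "text/plain", "my-example-bucket")
print(url)  # generate_url(expires_in=0, query_auth=False) yields a non-expiring public URL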
Example #6
def upload_ssh_pubkey(self, ssh_pubkey, fingerprint):
    bucket = self.s3.lookup(self.s3_bucket_name)
    if bucket is None:
        bucket = self.s3.create_bucket(self.s3_bucket_name,
                                       location=self.s3_location)
    s3_entry = S3Key(bucket)
    s3_entry.key = self.ssh_pubkey_s3_key_prefix + fingerprint
    s3_entry.set_contents_from_string(ssh_pubkey)
Example #7
def copy_files_to_s3(s3_connection, bucket, files):
    prepare_docker_zip()
    sys.stdout.write("Sending files to S3...")
    sys.stdout.flush()
    s3_bucket = s3_connection.get_bucket(bucket)
    s3_key = S3Key(s3_bucket)
    for f in files:
        s3_key.key = os.path.basename(f)
        with open(f, 'rb') as opened_file:
            s3_key.set_contents_from_file(opened_file)
    print("Done!")
Example #8
File: s3.py  Project: alexissmirnov/donomo
def _get_key(s3_path):
    """ Helper function to retrieve an S3Key object representing the given
        path in S3.

        @type  s3_path: string
        @param s3_path: the path (not including the bucket name) in S3
            for which to build the key object.

        @rtype:   boto.s3.key.Key
        @returns: An S3 Key object for the given s3_path.
    """

    return S3Key(bucket=_get_bucket(), name=s3_path)
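Example use of the helper, assuming the module's _get_bucket() is configured as the snippet implies; the path is hypothetical:

key = _get_key("documents/12345/page-001.pdf")
url = key.generate_url(expires_in=300)  # 5-minute signed URL for the object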
Example #9
def fetch_lists():
    """
    Reaches out to S3 and pulls the current word lists down to the local FS.
    """
    conn = boto.connect_s3(AWS_ACCESS, AWS_SECRET)
    bucket = conn.get_bucket(AWS_S3_BUCKET)
    keys = bucket.list()
    for key in keys:
        if re.search(r'list\.yaml', key.key):
            k = S3Key(bucket)
            k.key = key.key
            k.get_contents_to_filename(os.path.join(ROOT_PATH, key.key))
Example #10
def save_to_s3(key, content):
    conn = S3Connection(settings['aws3_access_key'],
                        settings['aws3_secret_key'])
    bucket = conn.get_bucket(settings['aws3_bucket_name'])
    f = S3Key(bucket)
    f.key = key
    f.set_contents_from_string(content,
                               headers={
                                   'Content-Type': 'application/javascript',
                                   'x-amz-storage-class': 'REDUCED_REDUNDANCY'
                               },
                               policy='public-read')
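The function relies on a module-level settings mapping. A stand-in for experimenting with it might look like this (placeholder values, not real credentials; the bucket name is hypothetical):

settings = {
    'aws3_access_key': 'placeholder-access-key',
    'aws3_secret_key': 'placeholder-secret-key',
    'aws3_bucket_name': 'my-js-bucket',  # hypothetical bucket
}

save_to_s3('widgets/app.js', "console.log('deployed');")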
Example #11
def upload_s3(file, key_name, content_type, bucket_name):
    #s3 connection
    conn = S3Connection(application.config['AWS_ACCESS_KEY_ID'], application.config['AWS_SECRET_ACCESS_KEY'], host='s3.ap-northeast-2.amazonaws.com')

    #upload the file after getting the right bucket
    bucket = conn.get_bucket(bucket_name)
    obj = S3Key(bucket)
    obj.name = key_name
    obj.content_type = content_type
    obj.set_contents_from_string(file.getvalue())
    obj.set_acl('public-read')

    #close stringio object
    file.close()

    return obj.generate_url(expires_in=0, query_auth=False)
Example #12
def store_it(key, data, content_type):
    try:
        ddb_kvstore.put_item(data={
            'key': key,
            'path': get_storage_path_for(key),
            'body': data,
            'content-type': content_type
        },
                             overwrite=True)
    except ValidationException as e:
        # if we get an "it's too big" exception we'll put it in our s3
        # kvstore 'big stuff' bucket
        newS3Key = S3Key(s3_bucket)
        newS3Key.key = get_storage_path_for(key)
        newS3Key.set_metadata('content-type', content_type)
        newS3Key.set_metadata('key', key)
        newS3Key.set_contents_from_string(data)
Example #13
def read_it(key):
    try:
        item = ddb_kvstore.get_item(path=get_storage_path_for(key))
        return item['content-type'], item['body'], key
    except ItemNotFound as e:
        # could be because it's super big, so we'll try our s3 bucket before
        # giving up for good
        try:
            s3Key = S3Key(s3_bucket)
            s3Key.key = get_storage_path_for(key)
            body = s3Key.get_contents_as_string()
            content_type = s3Key.get_metadata('content-type')
            return content_type, body, key
        except S3ResponseError as e:
            logging.debug("unable to find item for key %s anywhere\n%s", key,
                          e)

        return None, None, None
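Together, store_it and read_it form a small key-value layer that overflows oversized items from DynamoDB into S3 and falls back to S3 on reads. A round-trip sketch, assuming ddb_kvstore, s3_bucket, and get_storage_path_for are configured as the snippets expect (large_payload is a stand-in value):

store_it('session/abc123', large_payload, 'application/octet-stream')
content_type, body, key = read_it('session/abc123')
# read_it returns (None, None, None) when the key is in neither store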
Example #14
def upload_file(self,
                file,
                bucket,
                path,
                name=None,
                mimetype=None,
                ttl=False):
    if isinstance(file, basestring):
        file = open(file, 'rb')
    else:
        file.seek(0)
    k = S3Key(self.buckets[bucket])
    k.name = path
    name_escaped = urllib.quote_plus(name.encode('utf8')) if name else path
    s3_headers = {
        'Content-Disposition': 'inline; filename=' + name_escaped,
        'Cache-Control': 'max-age=' + str(86400 * (ttl if ttl else 3650))
    }
    if mimetype:
        s3_headers['Content-Type'] = mimetype
    k.set_contents_from_file(file, headers=s3_headers)
    k.make_public()
    return self.url(bucket, path)
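A hypothetical call on an instance of the surrounding class (here called storage, with 'public' as a hypothetical bucket alias), serving a PDF inline under a human-readable display name and caching it for one day; the ttl argument is in days, per the 86400 multiplier above:

url = storage.upload_file('/tmp/report.pdf', 'public', 'reports/2020/q1.pdf',
                          name=u'Q1 report.pdf',
                          mimetype='application/pdf',
                          ttl=1)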
Example #15
def create_and_upload_index_to_s3(s3, outputs=None):
    outputs = outputs or dict()
    output_key = "StelligentDemoBucketURL"
    bucket_url = ([output.value for output in outputs
                  if output.key == output_key])[0]
    bucket_name = re.sub(r'http://(.*).s3-website.*', r'\1', bucket_url)
    contents = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
    <html xmlns="http://www.w3.org/1999/xhtml">
    <head>
    <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
        <title>Demo Index File in S3 Bucket</title>
    </head>

    <body>
    <h1>Stelligent Demo Stack</h1>
    <pre>"""
    for output in outputs:
        contents += "%40s : %s\n" % (output.key, output.value)
    s3_bucket = s3.get_bucket(bucket_name)
    s3_key = S3Key(s3_bucket)
    s3_key.key = "index.html"
    s3_key.set_metadata('Content-Type', 'text/html')
    s3_key.set_contents_from_string(contents)
    s3_key.set_acl('public-read')
Example #16
def upload(bucket, s3conn, abs_fnm):
    """Uploads a single file to the configured s3 bucket.

    A file is stored in reduced redundancy, with
    public-read acl.

    Params:
      abs_fnm - Absolute path to our file
      bucket - Bucket name we're uploading to
      s3conn - A connection to S3

    Returns:
      fnm - The base name of the uploaded object's key, or an
      empty string if the upload didn't complete.
    """
    fnm = os.path.basename(abs_fnm)
    b = S3Bucket(connection=s3conn, name=bucket)
    k = S3Key(b)
    k.key = fnm
    local_size = os.stat(abs_fnm).st_size
    up_size = k.set_contents_from_filename(abs_fnm,
                                           reduced_redundancy=True,
                                           policy="public-read")
    return fnm if local_size == up_size else ""
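A usage sketch; the bucket name and file path are hypothetical, and boto.connect_s3() is assumed to pick up credentials from the environment:

import boto

conn = boto.connect_s3()
name = upload('my-artifacts-bucket', conn, '/tmp/build/artifact.tar.gz')
if name:
    print('Uploaded as %s' % name)
else:
    print('Upload did not complete')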
Example #17
def set_stack_name_in_s3(s3_connection, stack_name, dest_name, bucket):
    s3_bucket = s3_connection.get_bucket(bucket)
    s3_key = S3Key(s3_bucket)
    s3_key.key = dest_name
    s3_key.set_contents_from_string(stack_name)
Example #18
def file_exists(self, bucket, path):
    k = S3Key(self.buckets[bucket])
    k.name = path
    return k.exists()
Example #19
print('Config.AWS_SECRET_ACCESS_KEY', Config.AWS_SECRET_ACCESS_KEY)
print('Config.BUCKET_NAME', Config.BUCKET_NAME)

# s3_conn = S3Connection(Config.AWS_ACCESS_KEY_ID, Config.AWS_SECRET_ACCESS_KEY)
# s3_bucket = s3_conn.get_bucket(Config.BUCKET_NAME)

s3_conn = boto.s3.connect_to_region(
    'ap-southeast-1',
    aws_access_key_id=Config.AWS_ACCESS_KEY_ID,
    aws_secret_access_key=Config.AWS_SECRET_ACCESS_KEY,
    is_secure=False,  # set to False if you are not using SSL
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)
s3_bucket = s3_conn.get_bucket(Config.BUCKET_NAME)

s3_obj = S3Key(s3_bucket)


def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    CORS(app)
    app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
        if app.config['ELASTICSEARCH_URL'] else None
    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp)
    # ? from app.auth import bp as auth_bp