Example #1
    def uploadFileToS3(self, filename):
        '''Uploads file to S3.'''
        destDir = ''  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
Example #2
    def transfer_files(self):
        from boto.s3.connection import S3Connection
        from boto.s3.connection import Key
        conn = S3Connection(self.extra_args['aws_access_key'],
                            self.extra_args['aws_secret_key'])
        bucket = conn.get_bucket(self.extra_args['s3_bucket'])
        for fname in self.files:
            key = Key(bucket)
            key.key = os.path.basename(fname)
            key.set_contents_from_filename(fname)
Example #4
    def uploadFileToS3(self, filename):  # pylint: disable=invalid-name,missing-param-doc,missing-type-doc
        """Upload file to S3."""
        # Root folder of the S3 bucket
        destDir = ""  # pylint: disable=invalid-name
        destpath = os.path.join(destDir, os.path.basename(filename))
        print(f"Uploading {filename} to Amazon S3 bucket {self.bucket_name}")

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
Example #5
    def uploadFileToS3(self, filename):
        '''Uploads file to S3.'''
        destDir = ''  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
        print  # This newline is needed to get the path of the compiled binary printed on a newline.
Example #6
    def uploadFileToS3(self, filename):
        """Uploads file to S3."""
        destDir = ""  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print "Uploading %s to Amazon S3 bucket %s" % (filename, self.bucket_name)

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
        print  # This newline is needed to get the path of the compiled binary printed on a newline.
Example #7
    def upload(self, local_filepath, aws_filepath):
        """
        Uploads `local_filepath` to `aws_filepath`.

        Returns the published URL for the file.
        """
        logging.info('Publishing %s to %s' % (local_filepath, aws_filepath))

        key = Key(bucket=self.bucket, name=aws_filepath)
        key.key = aws_filepath
        key.set_contents_from_filename(local_filepath)
        key.set_acl('public-read')
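Note that the docstring above promises a published URL, yet the function returns nothing. A minimal sketch of a variant that does return one, using boto 2's `Key.generate_url` (the `upload` name and `self.bucket` attribute are carried over from the example; this is an illustration, not the original project's code):

import logging
from boto.s3.key import Key

def upload(self, local_filepath, aws_filepath):
    """Upload `local_filepath` to `aws_filepath` and return its public URL."""
    logging.info('Publishing %s to %s' % (local_filepath, aws_filepath))
    key = Key(bucket=self.bucket, name=aws_filepath)
    key.set_contents_from_filename(local_filepath)
    key.set_acl('public-read')
    # query_auth=False builds an unsigned URL, which suffices for a
    # public-read object; expires_in is irrelevant for an unsigned URL.
    return key.generate_url(expires_in=0, query_auth=False)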
Example #8
def upload_file(conn, full_path):
    b = Bucket(conn, BUCKET)
    k = Key(b)
    k.key = full_path
    expires = datetime.utcnow() + timedelta(days=(25 * 365))
    expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
    k.set_metadata("Content-Type", mimetypes.guess_type(full_path)[0])
    k.set_metadata("Expires", expires)
    k.set_metadata("Cache-Control", "max-age={0}, public".format(25 * 365 * 36400))
    k.set_contents_from_filename(full_path)
    k.set_acl('public-read')
    print "{} -> http://s3.amazonaws.com/yaluandmike/{}".format(full_path, full_path)
Example #9
    def push_s3_file(self, bucket, src=None, key=None):
        """
        Upload a file to an S3 bucket
        """
        if not src: src = self.working_file
        if not key: key = os.path.basename(src)
        conn = boto.connect_s3(self.aws_id, self.aws_key)
        b = conn.create_bucket(bucket)
        k = Key(b)
        k.key = key
        k.set_contents_from_filename(src)
        self.working_file = '%s/%s' % (bucket, key)
        return self.working_file
Example #11
def uploadToS3():

    key = input("Enter the AWSKey:")
    secretkey = input("Enter the Secret Key:")
    awsKey = key
    awsSecret = secretkey

    bucname = input("Enter the bucket name:")

    conn = S3Connection(awsKey, awsSecret)
    #print(conn)
    print("inside Upload function..")
    #Connecting to a bucket
    bucket_name = bucname  #"luigibuckets"
    bucket = conn.get_bucket(bucket_name)
    print(bucket)
    #Setting the keys
    k = Key(bucket)
    print(k)
    k.key = "XGB_File_400trees_TrainDatafinal.csv"
    k.set_contents_from_filename("XGB_File_400trees_TrainDatafinal.csv")

    k2 = Key(bucket)
    k2.key = "documents_meta.csv"
    k2.set_contents_from_filename("documents_meta.csv")

    k3 = Key(bucket)
    k3.key = "xgb_results.csv"
    k3.set_contents_from_filename("xgb_results.csv")

    k4 = Key(bucket)
    k4.key = "xgboost.pkl"
    k4.set_contents_from_filename("xgboost.pkl")

    print('Upload Completed..')
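Examples #11, #14 and #18 all repeat the same three-line Key dance per hard-coded file name. A hedged refactor under the same boto 2 API (the function name and `filenames` parameter are inventions for illustration):

import os
from boto.s3.key import Key

def upload_files(bucket, filenames):
    # one fresh Key per file; the S3 object name is the file's base name
    for fname in filenames:
        k = Key(bucket)
        k.key = os.path.basename(fname)
        k.set_contents_from_filename(fname)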
Example #13
def upload():
    s3_conn = s3()

    #     bucket = s3_conn.create_bucket('distributed-web-crawler')
    bucket = Bucket(s3_conn, 'distributed-web-crawler')

    k = Key(bucket)

    k.key = 'list_links_a.txt'
    k.set_contents_from_filename('input_links_a.txt')

    os.remove('input_links_a.txt')

    s3_conn.close()
Example #14
def uploadToS3():
    awsKey = ''
    awsSecret = ''

    conn = S3Connection(awsKey, awsSecret)
    #print(conn)

    #Connecting to a bucket
    bucket_name = "luigibuckets"
    bucket = conn.get_bucket(bucket_name)
    #print(bucket)
    #Setting the keys
    k = Key(bucket)
    print(k)
    k.key = "LoanStats_Validated.csv"
    k.set_contents_from_filename("LoanStats_Validated.csv")
Example #15
def upload_content(bucket=None, key_name=None, 
                    data_type=kUploadContentType.String, data=None) :
    bucket = get_bucket(bucket)
    bucketKey = Key(bucket)
    bucketKey.key = key_name
    try :
        if data_type == kUploadContentType.String :
            bucketKey.set_contents_from_string(data)
        elif data_type == kUploadContentType.File :
            bucketKey.set_contents_from_file(data)
        elif data_type == kUploadContentType.FileName :
            bucketKey.set_contents_from_filename(data)
        elif data_type == kUploadContentType.Stream :
            bucketKey.set_contents_from_stream(data)
        return True
    except Exception :
        # swallow the error and report failure to the caller
        return False
Example #16
def main(bucket_name, image_name):
    AWS_ACCESS_KEY_ID = 'Public ACCESS ID'
    AWS_SECRET_ACCESS_KEY = 'SECRET KEY GOES HERE'

    bucket_name = str(bucket_name)
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)

    bucket = Bucket(conn, bucket_name)

    testfile = str(image_name)
    print('Uploading %s to Amazon S3 bucket %s' % (testfile, bucket_name))

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    k = Key(bucket)
    k.key = str(image_name)
    k.set_contents_from_filename(testfile, cb=percent_cb, num_cb=10)
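boto 2 invokes `cb` as `cb(bytes_transmitted, total_bytes)` at most roughly `num_cb` times during the transfer, so the dot-printer above can just as easily report percentages. A minimal sketch:

import sys

def percent_cb(complete, total):
    # complete/total are byte counts supplied by boto 2 during the transfer
    if total:
        sys.stdout.write('\r%.0f%% uploaded' % (100.0 * complete / total))
        sys.stdout.flush()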
Example #17
    def output(self):
        # Create a connection
        access_key = self.awsKey
        #print(access_key)
        access_secret = self.awsSecret
        conn = S3Connection(access_key, access_secret)
        #print(conn)

        # Connecting to a bucket
        bucket_name = "luigibuckets"
        bucket = conn.get_bucket(bucket_name)
        #print(bucket)

        # Setting the keys
        k = Key(bucket)
        print(k)
        k.key = "squares.txt"
        k.set_contents_from_filename("squares.txt")
Example #18
def uploadToS3():
    aKey = input("Enter the Key:")
    aSKey = input("Enter the Secret Key:")

    awsKey = aKey
    awsSecret = aSKey

    conn = S3Connection(awsKey, awsSecret)
    #print(conn)

    #Connecting to a bucket
    bucket_name = "luigibuckets"
    bucket = conn.get_bucket(bucket_name)
    #print(bucket)
    #Setting the keys
    k = Key(bucket)
    print(k)
    k.key = "ValidatedCombinedRejectLoan.csv"
    k.set_contents_from_filename("ValidatedCombinedRejectLoan.csv")
Example #19
def upload_image(file_path, file_name):
    '''Upload an image to S3 and return its file name.'''
    conn = S3Connection(S3_ACCESS_KEY, S3_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(S3_BUCKET_NAME)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    k = Key(bucket)
    k.key = file_name
    if file_path:
        file_path = file_path + file_name
    else:
        file_path = "/home/saksham/startupportal/portal/media/" + (str(file_name))
    
    k.set_contents_from_filename(file_path, cb=percent_cb, num_cb=10)
    k.set_acl('public-read')
    win_botohelper_logger.debug("upload_image doc upload with %s name" % str(file_name) )
    return file_name
Example #20
    def s3_put_directory(s3_dir, local_dir):
        logger.info("Putting file://%s to s3://%s/%s" %
                    (os.path.abspath(local_dir), aws_bucket.name, s3_dir) +
                    " (DRY RUN)" if settings.S3_DRY_RUN else "")

        if not settings.S3_DRY_RUN:
            for dirname, dirnames, filenames in os.walk(local_dir):
                for filename in filenames:
                    if filename == ".DS_Store":
                        continue
                    filepath = os.path.join(dirname, filename)
                    relpath = os.path.relpath(filepath, local_dir)
                    key = os.path.join(s3_dir, relpath)
                    aws_key = aws_bucket.get_key(key)
                    if aws_key:
                        # assume the content of file did not change if md5 hashes are consistent.
                        if aws_key.etag.strip("\"") == calc_file_md5(filepath):
                            continue
                    else:
                        aws_key = Key(aws_bucket, key)
                    aws_key.set_contents_from_filename(filepath)
                    aws_key.set_acl('public-read')
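`calc_file_md5` is referenced but never defined in this excerpt; presumably it streams the file through MD5, since S3's ETag equals the plain MD5 hex digest only for non-multipart uploads. A minimal sketch of such a helper:

import hashlib

def calc_file_md5(filepath, chunk_size=8192):
    # hex MD5 of the file contents, read in chunks to bound memory use
    md5 = hashlib.md5()
    with open(filepath, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()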
Example #22
    def sync_dir(self, directory):
        k = Key(self.bucket_id)
        tree = self._get_tree(directory)
        for fname in tree:
            k.key = fname
            k.set_contents_from_filename(fname)
Example #23
def process_file(aws_conn, filepath):
    mtime = get_mtime(filepath)

    name_200 = add_size_name(filepath, '200')
    name_800 = add_size_name(filepath, '800')

    mtime_200 = get_mtime(name_200)
    mtime_800 = get_mtime(name_800)

    im = None
    if mtime_200 is None or mtime_200 < mtime:
        try:
            im = Image.open(filepath)
        except:
            return None
        generate_200(im, name_200)

    if mtime_800 is None or mtime_800 < mtime:
        if im is None:
            try:
                im = Image.open(filepath)
            except:
                return None
        generate_800(im, name_800)

    names = {
        'original': filepath,
        'thumbnail': name_200,
        'display': name_800,
        }


    b = Bucket(aws_conn, BUCKET)

    image_result = {}

    for image_type, name in names.items():
        aws_tag_path = add_size_name(name, 's3t') + '.meta'
        aws_key_path = name[len(GALLERY_DIR):].strip('/')

        image_result[image_type] = {
            'url': 'http://s3.amazonaws.com/{}/{}'.format(BUCKET, aws_key_path)
        }

        if not is_newer(name, aws_tag_path):
            try:
                resolution = load_data(aws_tag_path)
                resolution['width']  # probe the cached data; a KeyError falls through to recompute
            except:
                resolution = get_resolution(name)
                save_data(aws_tag_path, resolution)
            image_result[image_type].update(resolution)
            continue


        resolution = get_resolution(name)
        image_result[image_type].update(resolution)
        save_data(aws_tag_path, resolution)

        s3key = b.get_key(aws_key_path)
        mtime = get_mtime(name)

        if s3key and s3key.last_modified:
            print datetime.datetime(*parsedate(s3key.last_modified)[:6])
            print mtime
            if datetime.datetime(*parsedate(s3key.last_modified)[:6]) > mtime:
                with open(aws_tag_path, 'a'):
                    os.utime(aws_tag_path, None)
                continue
        print 'Sending {} to S3'.format(name)
        k = Key(b)
        k.key = aws_key_path
        expires = datetime.datetime.utcnow() + datetime.timedelta(days=25 * 365)
        expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
        k.set_metadata("Content-Type", mimetypes.guess_type(name)[0])
        k.set_metadata("Expires", expires)
        k.set_metadata("Cache-Control", "max-age={0}, public".format(86400 * 365 * 25))
        k.set_contents_from_filename(name)
        k.set_acl('public-read')

        with open(aws_tag_path, 'a'):
            os.utime(aws_tag_path, None)

    photo_age = get_photo_age(filepath)

    image_result['caption'] = get_caption(filepath)

    return photo_age, image_result
Example #24
    def upload_file_to_s3(self, file_name):
        path = ''  # directory under which the file should be uploaded
        full_key_name = os.path.join(path, file_name)
        k = Key(self.s3_bucket)
        k.key = 'test_key'
        k.set_contents_from_filename(full_key_name)  # argument is the local source path
Example #25
from boto.s3.connection import S3Connection, Key
from os.path import join, exists
from os import environ
import sys


def percent_cb(complete, total):
    sys.stdout.write('.')
    sys.stdout.flush()


try:
    backup_file = environ.get("BACKUP_FILE")
    backup_file += ".sql"
    file = join("./", backup_file)
    print("backup file path: %s" % backup_file)
    if exists(file):
        conn = S3Connection(environ.get('AWS_ACCESS_KEY_ID'),
                            environ.get('AWS_SECRET_ACCESS_KEY'),
                            host=environ.get('AWS_S3_HOST'))
        bucket = conn.get_bucket(environ.get('AWS_S3_BUCKET'))
        file = Key(bucket)
        file.key = backup_file
        print("uploading %s to s3" % backup_file)
        file.set_contents_from_filename(backup_file, replace=True, cb=percent_cb, num_cb=15)
        print("\nupload was complete")

except Exception as e:
    print("Error occurred: %s" % e)
    exit(1)
Example #26
def uploadToS3():
    aKey = input("Enter the Key:")
    aSKey = input("Enter the Secret Key:")

    awsKey = aKey
    awsSecret = aSKey

    conn = S3Connection(awsKey, awsSecret)
    #print(conn)

    #Connecting to a bucket
    bucket_name = "luigibuckets"
    bucket = conn.get_bucket(bucket_name)
    #print(bucket)
    #Setting the keys
    k = Key(bucket)
    print(k)
    k.key = "ValidatedCombinedRejectLoan.csv"
    k.set_contents_from_filename("ValidatedCombinedRejectLoan.csv")


Example #27
# MAGIC * Use the **boto** library in Python to programmatically write data to S3.
# MAGIC * Below is sample code - replace the placeholders with your own values.
# MAGIC * In this example, a file is downloaded from a URL.
# MAGIC * You can generate local files to write to S3 any other way you'd like as well.

# COMMAND ----------

url_to_retrieve = getArgument("1. URL_TO_RETRIEVE", "https://github.com/apache/spark/blob/master/README.md")
tmpfile = getArgument("2. TMP_FILE_PATH", "/tmp/spark_README.md")
s3_filename = getArgument("3. S3_FILE_PATHNAME", "/my-s3-bucket/spark_README.md")

# COMMAND ----------

# Download a URL and save it to a local file.
# (urllib.urlretrieve is Python 2; on Python 3 use urllib.request.urlretrieve.)
import urllib

urllib.urlretrieve(url_to_retrieve, tmpfile)

# COMMAND ----------

# Note: boto should be available.
from boto.s3.connection import S3Connection
from boto.s3.connection import Key

# Open a connection to S3.
conn = S3Connection(ACCESS_KEY, SECRET_KEY)
bucket = conn.get_bucket(AWS_BUCKET_NAME)
k = Key(bucket)
k.key = s3_filename
k.set_contents_from_filename(tmpfile)