Example #1
    def upload_request(self, data, bucket_name=None):
        '''
        Upload any given data to S3.
        '''

        if bucket_name is not None:
            create_bucket = False
        else:
            bucket_name = self.job_name
            create_bucket = True

        if not isinstance(data, list):
            data = [data]

        self._track_uploaded = dict.fromkeys(data)

        for dat in data:
            try:
                upload_to_s3(bucket_name, dat, create_bucket=create_bucket,
                             aws_access=self.credentials)
                self._track_uploaded[dat] = "Success"
            except Exception as e:
                self._track_uploaded[dat] = "Failed with: " + e
                return False

        return True
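All of these examples import upload_to_s3 from upload_download_s3, which is not shown on this page. Purely as an illustration, here is a minimal sketch of what a helper with the signature implied by the calls below might look like, written against the boto2 API the later examples use. It is an assumption, not the actual upload_download_s3 implementation, and it ignores chunk_size, which the real helper presumably uses for multipart uploads.

import os

from boto.s3.connection import S3Connection

def upload_to_s3(bucket_name, upload_item, create_bucket=False,
                 chunk_size=52428800, conn=None, aws_access=None,
                 key_prefix="", replace=False):
    # Hypothetical sketch; the signature is inferred from the calls
    # in the examples on this page.
    if conn is None:
        conn = S3Connection(**aws_access)
    # Bucket names must be lowercase (see the note in Example #4).
    bucket = (conn.create_bucket(bucket_name) if create_bucket
              else conn.get_bucket(bucket_name))
    key_name = key_prefix + os.path.basename(upload_item)
    if not replace and bucket.get_key(key_name) is not None:
        raise ValueError("Key '%s' already exists in bucket '%s'."
                         % (key_name, bucket_name))
    # chunk_size is unused here; the real helper presumably splits
    # large files into multipart uploads of this many bytes.
    key = bucket.new_key(key_name)
    key.set_contents_from_filename(upload_item)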
Example #2
    def upload_results(self, make_tar=False):

        if not self.empty_flag:
            if len(self.output_files) == 0:
                self.message_dict['upload_results'] = "No output files found."
            else:
                if make_tar:
                    # Create a tar file and only upload it.
                    import tarfile
                    tar = tarfile.open("data_products.tar", "w:")
                    for name in self.output_files:
                        tar.add(name)
                    tar.close()
                    self.output_files = ["data_products.tar"]

                try:
                    for out in self.output_files:
                        upload_to_s3(self.bucket_name, out,
                                     aws_access=self.credentials,
                                     create_bucket=False,
                                     key_prefix="data_products/")
                    self.message_dict['upload_results'] = \
                        "Successfully uploaded results."
                except Exception:
                    # Record the full traceback so the failure is reported.
                    import traceback
                    self.message_dict['upload_results'] = traceback.format_exc()
                    self.success = False
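Example #2 opens the archive with mode "w:", i.e. uncompressed. If upload size matters more than CPU time, tarfile can also compress on the fly; a small sketch with placeholder file names:

import tarfile

# "w:gz" writes a gzip-compressed archive ("w:bz2" and "w:xz" also work).
output_files = ["image1.fits", "image2.fits"]  # placeholder names
with tarfile.open("data_products.tar.gz", "w:gz") as tar:
    for name in output_files:
        tar.add(name)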
Example #4
#!/usr/bin/env python
'''
Upload data to an AWS S3 bucket (which can then be downloaded with
download_data_AWS.py).

Note: no capital letters are allowed in the bucket name.
'''
import sys
sys.path.append('PATH_TO_AWS_CONTROLLER')
from upload_download_s3 import upload_to_s3

bucket_name = 'BUCKET_NAME_HERE'
upload_item = 'PATH_TO_DATA_ON_YOUR_SYSTEM'

# chunk_size is in bytes: 52428800 bytes = 50 MiB per chunk.
upload_to_s3(bucket_name, upload_item,
             create_bucket=True, chunk_size=52428800, conn=None,
             aws_access={'aws_access_key_id': 'ACCESS_KEY_HERE',
                         'aws_secret_access_key': 'SECRET_ACCESS_KEY_HERE'},
             replace=False)
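The chunk_size of 52428800 bytes is 50 MiB, presumably the per-part size for multipart uploads. As a quick check, the number of parts a given file would need can be estimated like this (the path is the same placeholder as above):

import math
import os

chunk_size = 52428800  # 50 MiB, matching the call above
file_size = os.path.getsize('PATH_TO_DATA_ON_YOUR_SYSTEM')
print("Parts needed: %d" % max(1, math.ceil(file_size / chunk_size)))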
Example #6
    import json
    import os

    from boto import config, sqs

    # proc_name and region are assumed to be defined earlier in the
    # enclosing function. Read the AWS keys from the standard
    # ~/.aws/credentials file via boto's global config object.
    config.load_credential_file(
        os.path.join(os.path.expanduser("~"), ".aws/credentials"))
    info = config.items("default")[2:]
    key = info[0][1]
    secret = info[1][1]

    # Create a test file and upload to S3
    if not os.path.exists("tests/test.txt"):
        test_string = "ALLGLORYTOTHEHYPNOTOAD"

        with open("tests/test.txt", "w") as f:
            f.write(test_string)

    print("Uploading to S3")

    upload_to_s3(proc_name, "tests/test.txt", key_prefix="data/",
                 aws_access={"aws_access_key_id": key,
                             "aws_secret_access_key": secret},
                 create_bucket=True)

    # Create an SQS queue and message for the worker
    queue = sqs.connect_to_region(region).create_queue(proc_name)

    mess = {}
    mess["proc_name"] = proc_name
    mess["bucket"] = proc_name
    mess['key_name'] = "data/test.txt"
    mess['command'] = ["ls /home/ubuntu/data", "ls -la /home/ubuntu/data/"]
    mess['parameters'] = ""

    mess = queue.new_message(body=json.dumps(mess))
    queue.write(mess)
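The SQS message written above is intended for a worker instance. As a hedged sketch (not part of the original code), the worker side could fetch and decode it with the same boto2 SQS API; the placeholders below stand in for the proc_name and region used in the example:

import json

from boto import sqs

region = "REGION_HERE"         # placeholder for the region used above
proc_name = "QUEUE_NAME_HERE"  # placeholder for the proc_name used above

queue = sqs.connect_to_region(region).get_queue(proc_name)

# Fetch one message, inspect its payload, then delete it from the queue.
for message in queue.get_messages(num_messages=1):
    job = json.loads(message.get_body())
    print(job["bucket"], job["key_name"], job["command"])
    queue.delete_message(message)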