def __init__(self, conf: DictConfig):
    """Wire up the S3-backed objects service from the given configuration.

    Builds a boto3 S3 client for the region named in ``conf.s3.aws_region``,
    wraps it in an ``ObjectDataSessionFactory``, and exposes an
    ``ObjectsService`` instance as ``self.object``.
    """
    self.config = conf
    s3_client = boto3.client(
        's3',
        region_name=self.config.s3.aws_region,
        config=Config(),
    )
    session_factory = ObjectDataSessionFactory(s3_client)
    self.object = ObjectsService(ObjectsDataAccess(session_factory))
class AWSConfig(object):
    """Flask/SQLAlchemy configuration object that also pulls AWS credentials
    from ``~/.aws/credentials``.

    NOTE(review): the class body has side effects — the credentials file is
    opened and parsed as soon as this module is imported.
    """

    DEBUG = True

    # edit the URI below to add your RDS password and your AWS URL
    # The other elements are the same as used in the tutorial
    # format: (user):(password)@(db_identifier).amazonaws.com:3306/(db_name)
    # NOTE(review): a literal password ('passwd') is committed in this URI —
    # it should come from an environment variable or secrets store instead.
    SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://ualberta:[email protected]:3306/user_db'
    # Recycle pooled DB connections after 3600 seconds (one hour).
    SQLALCHEMY_POOL_RECYCLE = 3600

    # Parse the shared AWS credentials file using boto's Config parser.
    config = Config()
    config.load_credential_file(
        os.path.join(os.path.expanduser("~"), ".aws/credentials"))
    # Drop the first two items of the [default] section and assume the next
    # two are (aws_access_key_id, aws_secret_access_key), in that order.
    # NOTE(review): this indexing is ordering-sensitive — confirm it matches
    # the actual layout of the credentials file.
    info = config.items("default")[2:]
    AWS_KEY = info[0][1]
    AWS_SECRET = info[1][1]

    WTF_CSRF_ENABLED = True
    # NOTE(review): hard-coded Flask secret key ('cheese'); replace with a
    # random value sourced from the environment for anything non-local.
    SECRET_KEY = 'cheese'
from upload_download_s3 import upload_to_s3, download_from_s3, remove_s3_bucket from utils import timestring from boto import sqs, ec2, s3 from boto import Config import json from time import sleep, time import os # Assuming that the testing system has the AWS config already set. try: proc_name = "aws_ska_test_worker_" + timestring() region = "us-west-2" # Read in credentials config = Config() config.load_credential_file(os.path.expanduser("~/.aws/credentials")) info = config.items("default")[2:] key = info[0][1] secret = info[1][1] # Create a test file and upload to S3 if not os.path.exists("tests/test.txt"): test_string = "ALLGLORYTOTHEHYPNOTOAD" with open("tests/test.txt", "w") as f: f.write(test_string) print("Uploading to S3") upload_to_s3(proc_name,
from upload_download_s3 import upload_to_s3, download_from_s3, remove_s3_bucket from utils import timestring from boto import sqs, ec2, s3 from boto import Config import json from time import sleep, time import os # Assuming that the testing system has the AWS config already set. try: proc_name = "aws_ska_test_worker_" + timestring() region = "us-west-2" # Read in credentials config = Config() config.load_credential_file(os.path.join(os.path.expanduser("~"),".aws/credentials")) info = config.items("default")[2:] key = info[0][1] secret = info[1][1] # Create a test file and upload to S3 if not os.path.exists("tests/test.txt"): test_string = "ALLGLORYTOTHEHYPNOTOAD" with open("tests/test.txt", "w") as f: f.write(test_string) print("Uploading to S3") upload_to_s3(proc_name, "tests/test.txt", key_prefix="data/",
dataVariation - the variation of data to use [optional] jobAwsKey - aws key for job to access S3 [optional] jobAwsSecret - aws secret for job to access S3 [optional] core-count - the number of core instances to run [optional] spot-count - the number of spot instances to run [optional] spot-bid - spot instance bid price [optional] availabilityZone - availability zone to launch EMR jobs from [optional] keep-alive - keep servers around after job [optional] """ def usage(): print usage_string sys.exit() if __name__ == '__main__': boto_config = BotoConfig() try: opts, args = getopt.getopt(sys.argv[1:],'', ['awsKey=','awsSecret=','jobAwsKey=','jobAwsSecret=','s3Bucket=','core-count=','spot-count=','spot-bid=','keypair=','jobName=','data-region-code=','data-variation=','availabilityZone=','keep-alive']) except: usage() # set your aws keys and S3 bucket, e.g. from environment or .boto params = {'aws_key' : None or boto_config.get('Credentials', 'aws_access_key_id'), 'secret' : None or boto_config.get('Credentials', 'aws_secret_access_key'), 'job_aws_key' : None or boto_config.get('Credentials', 'aws_access_key_id'), 'job_aws_secret' : None or boto_config.get('Credentials', 'aws_secret_access_key'), 'keypair' : None, 's3_bucket' : None, 'job_name' : None,
from boto import Config as BotoConfig

# Description:
#   Demonstrates boto's config-file support: read the AWS access key id and
#   secret access key out of the [Credentials] section.
boto_config = BotoConfig()
aws_key, aws_secret = (
    boto_config.get('Credentials', option)
    for option in ('aws_access_key_id', 'aws_secret_access_key')
)