Example #1
    def test_profile_default(self):
        self.bc_session_cls.return_value.profile = None

        session = Session()

        self.assertEqual(session.profile_name, 'default')
Example #2
    def test_can_reach_events(self):
        mock_bc_session = self.bc_session_cls()
        session = Session(botocore_session=mock_bc_session)
        session.events
        mock_bc_session.get_component.assert_called_with('event_emitter')
Example #3
    def test_arguments_not_required(self):
        Session()

        self.assertTrue(self.bc_session_cls.called,
                        'Botocore session was not created')
Example #4
import json

import boto3
import requests
from boto3.session import Session
from requests_aws4auth import AWS4Auth  # AWS4Auth is assumed to come from the requests_aws4auth package


def try_request():
    headers = {
        "Content-Type": "application/pdf",
    }

    client = boto3.client("sts")
    account_id = client.get_caller_identity()["Account"]
    print(account_id)

    role_to_assume = "arn:aws:iam::248804316466:role/operator"
    response = client.assume_role(RoleArn=role_to_assume,
                                  RoleSessionName="assumed_role")

    session = Session(
        aws_access_key_id=response["Credentials"]["AccessKeyId"],
        aws_secret_access_key=response["Credentials"]["SecretAccessKey"],
        aws_session_token=response["Credentials"]["SessionToken"],
    )
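    # The new session now carries the temporary assumed-role credentials.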

    client = session.client("sts")
    account_id = client.get_caller_identity()["Account"]
    print(account_id)

    credentials = session.get_credentials()

    credentials = credentials.get_frozen_credentials()
    access_key = credentials.access_key
    secret_key = credentials.secret_key
    token = credentials.token

    auth = AWS4Auth(
        access_key,
        secret_key,
        "eu-west-1",
        "execute-api",
        session_token=token,
    )
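    # AWS4Auth (from requests_aws4auth) will SigV4-sign the API Gateway request below for the "execute-api" service in eu-west-1.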

    method = "PUT"
    payload = {
        "checklist": {
            "data": {
                "type": "supportingdocuments",
                "attributes": {
                    "submission_id": 1234
                },
                "file": {
                    "name": "blah.pdf",
                    "mimetype": "application/pdf",
                    "source": "string",
                },
            }
        }
    }

    url = "https://sq25usy81d.execute-api.eu-west-1.amazonaws.com/v1/clients/33205624/reports/123/checklists/123"
    # url = "https://sq25usy81d.execute-api.eu-west-1.amazonaws.com/v1/flask/clients/33205624/reports"

    body = json.dumps(payload)

    response = requests.request(method=method,
                                url=url,
                                auth=auth,
                                data=body,
                                headers=headers)

    print(response.text)
    print(response.status_code)
Example #5
    def test_create_client(self):
        session = Session(region_name='us-east-1')
        client = session.client('sqs', region_name='us-west-2')

        self.assertTrue(client, 'No low-level client was returned')
Example #6
def s3_rename(output_dir, old, new, sse_kms_key_id):
    # move the old data into the new area
    session = Session()
    client = session.client('s3')
    s3 = session.resource('s3')
    s3_path, bucket, key_prefix = parse_s3(output_dir)

    # Ensure bucket exists
    try:
        client.head_bucket(Bucket=bucket)
    except ClientError:
        raise ArgumentError('S3 bucket {} does not exist.'.format(bucket))

    log.info('Retrieving list of S3 objects to rename in bucket "{}"'.format(
        bucket))
    paginator = client.get_paginator('list_objects_v2')
    rename_iterator = paginator.paginate(Bucket=bucket, Prefix=old + '/')
    obj_count = 0

    for page in rename_iterator:
        # loop through the pages of results renaming

        if page.get('Contents') is None:
            raise ArgumentError('Key {} does not exist in bucket {}'.format(
                old, bucket))

        # Loop through the old objects copying and deleting
        for obj in page.get('Contents'):
            old_key = obj.get('Key')
            old_meta = client.head_object(Bucket=bucket, Key=old_key)
            old_sse_type = old_meta.get('ServerSideEncryption')
            old_sse_key = old_meta.get('SSEKMSKeyId')
            new_key = new + old_key[len(old):]
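            # The new key keeps everything after the old prefix.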

            # check that we haven't already run and have existing data
            # in the new key
            new_obj = s3.Object(bucket, new_key)
            if new_key == old_key:
                log.debug(('Old and new keys match and new SSEKMSKeyId '
                           'Specified, re-encrypting {}').format(new_obj.key))
            else:
                try:
                    new_obj.load()
                    if new_key != old_key:
                        log.info('Skipping existing output in new '
                                 'location: {}'.format(new_obj.key))
                        continue
                except ClientError as e:
                    response_code = e.response.get('Error').get('Code')
                    if response_code == '404':
                        # the obj doesn't exist so we will copy
                        # the existing obj to the new spot
                        pass
                    else:
                        raise

            copy_from_args = dict(
                CopySource={'Bucket': bucket, 'Key': old_key},
                MetadataDirective='COPY')

            if sse_kms_key_id:
                # Re-encrypt with a new key
                copy_from_args['ServerSideEncryption'] = 'aws:kms'
                copy_from_args['SSEKMSKeyId'] = sse_kms_key_id
            if not sse_kms_key_id and old_sse_type == 'aws:kms':
                # Re-encrypt with the existing key
                copy_from_args['ServerSideEncryption'] = 'aws:kms'
                copy_from_args['SSEKMSKeyId'] = old_sse_key
            if not sse_kms_key_id and old_sse_type == 'AES256':
                # Re-encrypt with the existing AES256
                copy_from_args['ServerSideEncryption'] = 'AES256'

            new_obj.copy_from(**copy_from_args)
            log.debug('Renamed "{}" to "{}"'.format(old_key, new_key))
            # Either way, we delete the old object unless we are inplace
            # re-encrypting
            if new_key != old_key:
                s3.Object(bucket, old_key).delete()
                log.debug('Deleted "{}"'.format(old_key))
            obj_count += 1

    log.info(('Finished renaming/re-encrypting '
              '{} objects').format(obj_count))
Example #7
from unittest import TestCase
from unittest.mock import patch  # TestCase/patch assumed from the standard unittest tooling

from boto3.session import Session
from botocore.config import Config
from pyqldb.driver.qldb_driver import QldbDriver
from pyqldb.errors import DriverClosedError
from pyqldb.session.qldb_session import QldbSession

DEFAULT_SESSION_NAME = 'qldb-session'
DEFAULT_MAX_CONCURRENT_TRANSACTIONS = 10
DEFAULT_READ_AHEAD = 0
DEFAULT_RETRY_LIMIT = 4
DEFAULT_BACKOFF_BASE = 10
DEFAULT_TIMEOUT_SECONDS = 0.001
EMPTY_STRING = ''
MOCK_CONFIG = Config()
MOCK_LEDGER_NAME = 'QLDB'
MOCK_MESSAGE = 'message'
MOCK_BOTO3_SESSION = Session()
MOCK_LIST_TABLES_RESULT = ['Vehicle', 'Person']


class TestQldbDriver(TestCase):
    @patch('pyqldb.driver.qldb_driver.Queue')
    @patch('pyqldb.driver.qldb_driver.AtomicInteger')
    @patch('pyqldb.driver.qldb_driver.BoundedSemaphore')
    @patch('pyqldb.driver.qldb_driver.client')
    @patch('pyqldb.driver.qldb_driver.Config.merge')
    def test_constructor_with_valid_config(self, mock_config_merge,
                                           mock_client, mock_bounded_semaphore,
                                           mock_atomic_integer, mock_queue):
        mock_queue.return_value = mock_queue
        mock_atomic_integer.return_value = mock_atomic_integer
        mock_bounded_semaphore.return_value = mock_bounded_semaphore
Example #8
    def test_get_available_resources(self, list_mock, exist_mock, dir_mock):
        session = Session()
        self.loader.get_search_paths.return_value = ['search-path']

        names = session.get_available_resources()
        self.assertEqual(sorted(names), ['s3', 'sqs'])
Example #9
import os
import sys
import json
import operator
import datetime
import numpy as np
from sklearn.metrics.pairwise import pairwise_distances
from boto3.session import Session
"""reload interpreter, add credential path"""
reload(sys)
sys.setdefaultencoding('UTF8')
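# Note: reload() and sys.setdefaultencoding() exist only in Python 2.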
"""import credentials from root/AppCreds"""
with open(os.path.dirname(sys.path[0]) + '/AppCreds/AWSAcct.json',
          'r') as AWSAcct:
    awsconf = json.loads(AWSAcct.read())

dynamodb_session = Session(
    aws_access_key_id=awsconf["aws_access_key_id"],
    aws_secret_access_key=awsconf["aws_secret_access_key"],
    region_name="us-east-1")
dynamodb = dynamodb_session.resource('dynamodb')


def user_based_similarity():
    tb_preference = dynamodb.Table("Preference")
    raw = tb_preference.scan()
    data = raw['Items']
    # print data
    cols_count, rows_count = 6, len(data)
    matrix = [[0 for x in range(cols_count)] for y in range(rows_count)]
    for i in range(0, len(data)):
        matrix[i][0] = int(data[i]['food'])
        matrix[i][1] = int(data[i]['movie'])
        matrix[i][2] = int(data[i]['study'])
Example #10
else:
    logging.basicConfig(
        level=os.getenv("LOGLEVEL", "INFO"),
        format="%(threadName)s %(levelname)s %(name)s - %(message)s")

logger = logging.getLogger('advisor-pup')
try:
    with open('/var/run/secrets/kubernetes.io/serviceaccount/namespace',
              'r') as f:
        NAMESPACE = f.read()
except EnvironmentError:
    logger.info('Not Running on Openshift')

if (configuration.AWS_ACCESS_KEY_ID and configuration.AWS_SECRET_ACCESS_KEY):
    CW_SESSION = Session(
        aws_access_key_id=configuration.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=configuration.AWS_SECRET_ACCESS_KEY,
        region_name=configuration.AWS_REGION_NAME)
    cw_handler = watchtower.CloudWatchLogHandler(
        boto3_session=CW_SESSION,
        log_group=configuration.LOG_GROUP,
        stream_name=NAMESPACE)
    cw_handler.setFormatter(LogstashFormatterV1())
    logger.addHandler(cw_handler)
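    # Application logs are now mirrored to CloudWatch Logs via watchtower.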

thread_pool_executor = ThreadPoolExecutor(
    max_workers=configuration.MAX_WORKERS)
fact_extraction_executor = ThreadPoolExecutor(1)
loop = asyncio.get_event_loop()

kafka_consumer = AIOKafkaConsumer(configuration.PUP_QUEUE,
                                  loop=loop,
Example #11
import os
import sys
import json
import urllib
import logging
import tempfile
import numpy as np

from flask import Flask
from flask_mysqldb import MySQL  # assumed source of MySQL, matching the MYSQL_* config keys below
from boto3.session import Session

app = Flask(__name__)
db_url = os.environ['CLEARDB_DATABASE_URL'].split('//')
# db_url = os.environ['LOCAL_DATABASE_URL'].split('//')
aws_key = os.environ['ACCESS_KEY_S3']
aws_secret = os.environ['SECRET_ACCESS_S3']
region = os.environ['REGION']
bucket_name = os.environ['BUCKET_NAME']
aws_session = Session(aws_access_key_id=aws_key,
                      aws_secret_access_key=aws_secret, region_name=region)
s3 = aws_session.resource('s3')
ALLOWED_EXTENSIONS = set(['jpg'])
app.secret_key = os.environ['APP_SECRET']
app.config['MYSQL_USER'] = db_url[1].split(':')[0]
app.config['MYSQL_PASSWORD'] = db_url[1].split(':')[1].split('@')[0]
app.config['MYSQL_DB'] = db_url[1].split(':')[1].split('@')[1].split('/')[1].split('?')[0]
app.config['MYSQL_HOST'] = db_url[1].split(':')[1].split('@')[1].split('/')[0]
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  # sets the maximum file size to 16MB
mysql = MySQL(app)

def allowed_file(filename):
    return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

def get_user_credentials(emailid):
    conn = mysql.connection
Example #12
    def token(self) -> str:
        """ Gets an authorisation token from the prescribed AWS CodeArtifact domain. """
        session = Session(**self.session_kwargs)
        codeart = session.client("codeartifact", **self.client_kwargs)
        response = codeart.get_authorization_token(**self.token_kwargs)
        return str(response["authorizationToken"])
Example #13
"""
    Application data model defined here.

    :description: CloudAlbum is a fully featured sample application for the 'Moving to AWS serverless' training course
    :copyright: © 2019 written by Dayoungle Jun, Sungshik Jou.
    :license: MIT, see LICENSE for more details.
"""
import json
from datetime import datetime
from pynamodb.models import Model
from pynamodb.attributes import UnicodeAttribute, NumberAttribute, UTCDateTimeAttribute, ListAttribute, MapAttribute
from pynamodb.indexes import GlobalSecondaryIndex, IncludeProjection
from tzlocal import get_localzone
from boto3.session import Session
from os import environ

AWS_REGION = Session().region_name if environ.get(
    'AWS_REGION') is None else environ.get('AWS_REGION')


class EmailIndex(GlobalSecondaryIndex):
    """
    This class represents a global secondary index
    """
    class Meta:
        index_name = 'user-email-index'
        read_capacity_units = 5
        write_capacity_units = 5
        projection = IncludeProjection(['password'])

    # This attribute is the hash key for the index
    # Note that this attribute must also exist
Example #14
from boto3.session import Session
import boto3

# Credentials redacted; never hard-code real access keys in source.
ACCESS_KEY = 'YOUR_AWS_ACCESS_KEY_ID'
SECRET_KEY = 'YOUR_AWS_SECRET_ACCESS_KEY'

session = Session(
    aws_access_key_id=ACCESS_KEY,
    aws_secret_access_key=SECRET_KEY)
s3 = session.resource('s3')
your_bucket = s3.Bucket('roboticsalexburbano')

for s3_file in your_bucket.objects.all():
    print(s3_file.key)  # print each object key in the bucket
your_bucket.download_file('main.py', './a.py')
Example #15
        'django.security.*': {
            'handlers': ['console'],
            'level': 'DEBUG' if DEBUG else 'INFO',
        },
        'django.request': {
            'handlers': ['console'],
            'level': 'DEBUG' if DEBUG else 'INFO',
        },
    },
}

# Production logging
if ENV not in ['local', 'test', 'staging', 'preview']:
    # add AWS monitoring
    boto3_session = Session(aws_access_key_id=AWS_ACCESS_KEY_ID,
                            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                            region_name=AWS_DEFAULT_REGION)

    LOGGING['formatters']['cloudwatch'] = {
        'format': '%(hostname)s %(name)-12s [%(levelname)-8s] %(message)s',
    }
    LOGGING['handlers']['watchtower'] = {
        'level': AWS_LOG_LEVEL,
        'class': 'watchtower.django.DjangoCloudWatchLogHandler',
        'boto3_session': boto3_session,
        'log_group': AWS_LOG_GROUP,
        'stream_name': AWS_LOG_STREAM,
        'filters': ['host_filter'],
        'formatter': 'cloudwatch',
    }
    LOGGING['loggers']['django.db.backends']['level'] = AWS_LOG_LEVEL
Example #16
    def setUp(self):
        session = Session(region_name='us-west-1')
        current_dir = os.path.dirname(os.path.realpath(__file__))
        pill = placebo.attach(session, '{0}/test_data'.format(current_dir))
        pill.playback()
        self.rolling_deploy = RollingDeploy('stg', 'server-gms-extender', '0', 'ami-abcd1234', None, './regions.yml', stack_name='test-stack-name', session=session)
Example #17
def main(event, context):
    """
    This function deploys the inference endpoint for the model generated during the CodePipeline execution.
    Calls to the DynamoDB metadata store are also made to keep track of changes to the environment in the previous stage.

    :param event: the event object passed when the Lambda function is triggered
    :param context: the Lambda context object
    :return: the function ends once a result has been reported without a continuation token
    """
    job_id = event['CodePipeline.job']['id']
    job_data = event['CodePipeline.job']['data']
    try:
        input_artifact = job_data['inputArtifacts'][0]
        credentials = job_data['artifactCredentials']
        from_bucket = input_artifact['location']['s3Location']['bucketName']
        from_key = input_artifact['location']['s3Location']['objectKey']
        key_id = credentials['accessKeyId']
        key_secret = credentials['secretAccessKey']
        session_token = credentials['sessionToken']

        if "continuationToken" in job_data:
            continuation_token = job_data["continuationToken"]

            res = sagemaker.describe_endpoint(
                EndpointName=str(continuation_token))
            endpoint_service_status = str(res['EndpointStatus'])

            if endpoint_service_status == "Creating":
                msg = "Model Hosting Endpoint is being Created"
                print(msg)
                codepipeline.put_job_success_result(
                    jobId=job_id, continuationToken=continuation_token)

            if endpoint_service_status == "Updating":
                msg = "Model Hosting Endpoint is being Updated"
                print(msg)
                codepipeline.put_job_success_result(
                    jobId=job_id, continuationToken=continuation_token)

            if endpoint_service_status == "SystemUpdating":
                msg = "Model Hosting Endpoint System is being Updated"
                print(msg)
                codepipeline.put_job_success_result(
                    jobId=job_id, continuationToken=continuation_token)

            if endpoint_service_status == "InService":
                msg = "Model Hosting Endpoint is now InService"
                print(msg)
                codepipeline.put_job_success_result(jobId=job_id)

            if endpoint_service_status == "Failed":
                msg = "Model Hosting Endpoint Creation Failed"
                print(msg)
                codepipeline.put_job_failure_result(
                    jobId=job_id,
                    failureDetails={
                        'message': 'Endpoint Creation Failed',
                        'type': 'JobFailed'
                    })
            if endpoint_service_status == "RollingBack":
                msg = "Model Hosting Endpoint Encountered Errors"
                print(msg)
                codepipeline.put_job_failure_result(
                    jobId=job_id,
                    failureDetails={
                        'message': 'Endpoint Creation Rollback',
                        'type': 'JobFailed'
                    })
            if endpoint_service_status == "OutOfService":
                msg = "Model Hosting Endpoint Creation Failed"
                print(msg)
                codepipeline.put_job_failure_result(
                    jobId=job_id,
                    failureDetails={
                        'message': 'Endpoint Out of Service',
                        'type': 'JobFailed'
                    })
        else:
            session = Session(aws_access_key_id=key_id,
                              aws_secret_access_key=key_secret,
                              aws_session_token=session_token)
            s3 = session.client(
                's3', config=botocore.client.Config(signature_version='s3v4'))
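            # SigV4 signing is needed to read the artifact, which CodePipeline stores encrypted with a KMS key.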

            with tempfile.NamedTemporaryFile() as tmp:
                s3.download_file(from_bucket, from_key, tmp.name)
                with open(tmp.name) as f:
                    contents = f.readline()
                training_job_name = json.loads(contents)['training_job_name']

            res = sagemaker.list_training_jobs()
            for job in res['TrainingJobSummaries']:
                if job['TrainingJobName'] == str(training_job_name):
                    job_status = str(job['TrainingJobStatus'])
                    job_creation_time = str(job['CreationTime'])
                    job_end_time = str(job['TrainingEndTime'])

                    dynamodb.update_item(
                        TableName=str(os.environ['META_DATA_STORE']),
                        Key={'training_job_name': {
                            'S': training_job_name
                        }},
                        UpdateExpression=
                        "SET #job_creation_time= :val1, #job_end_time= :val2, #job_status= :val3",
                        ExpressionAttributeNames={
                            '#job_creation_time': 'job_creation_time',
                            '#job_end_time': 'job_end_time',
                            '#job_status': 'job_status'
                        },
                        ExpressionAttributeValues={
                            ':val1': {
                                'S': job_creation_time
                            },
                            ':val2': {
                                'S': job_end_time
                            },
                            ':val3': {
                                'S': job_status
                            }
                        })

            inference_img_uri = str(os.environ['FULL_NAME'])
            endpoint_name = str(os.environ["IMG"]) + '-' + str(
                datetime.datetime.today()).replace(' ', '-').replace(
                    ':', '-').rsplit('.')[0]
            model_name = 'model-' + str(endpoint_name)
            endpoint_config_name = 'endpoint-config-' + str(endpoint_name)
            model_obj_key = "{}/output/model.tar.gz".format(training_job_name)
            model_data_url = 's3://{}/{}'.format(str(os.environ['DEST_BKT']),
                                                 model_obj_key)

            mod_res = sagemaker.create_model(ModelName=model_name,
                                             ExecutionRoleArn=str(
                                                 os.environ['SAGE_ROLE_ARN']),
                                             PrimaryContainer={
                                                 'Image': inference_img_uri,
                                                 'ModelDataUrl': model_data_url
                                             })
            print(mod_res)
            conf_res = sagemaker.create_endpoint_config(
                EndpointConfigName=endpoint_config_name,
                ProductionVariants=[{
                    'VariantName': 'initial-variant',
                    'ModelName': model_name,
                    'InitialInstanceCount': 1,
                    'InstanceType': 'ml.t2.medium'
                }])
            print(conf_res)
            endpoint_res = sagemaker.create_endpoint(
                EndpointName=endpoint_name,
                EndpointConfigName=endpoint_config_name)
            print(endpoint_res)

            dynamodb.update_item(
                TableName=str(os.environ['META_DATA_STORE']),
                Key={'training_job_name': {
                    'S': training_job_name
                }},
                UpdateExpression=
                "SET #inference_image_uri= :val3, #model_name= :val4, #endpoint_config_name= :val5, "
                + "#endpoint_name= :val6",
                ExpressionAttributeNames={
                    '#inference_image_uri': 'inference_image_uri',
                    '#model_name': 'model_name',
                    '#endpoint_config_name': 'endpoint_config_name',
                    '#endpoint_name': 'endpoint_name'
                },
                ExpressionAttributeValues={
                    ':val3': {
                        'S': inference_img_uri
                    },
                    ':val4': {
                        'S': model_name
                    },
                    ':val5': {
                        'S': endpoint_config_name
                    },
                    ':val6': {
                        'S': endpoint_name
                    }
                })
            if 'EndpointArn' in endpoint_res.keys():
                codepipeline.put_job_success_result(
                    jobId=job_id, continuationToken=endpoint_name)
            else:
                codepipeline.put_job_failure_result(jobId=job_id,
                                                    failureDetails={
                                                        'message':
                                                        'Endpoint not Created',
                                                        'type': 'JobFailed'
                                                    })
    except Exception as e:
        print(e)
        codepipeline.put_job_failure_result(jobId=job_id,
                                            failureDetails={
                                                'message': str(e),
                                                'type': 'JobFailed'
                                            })
Example #18
def get_session(aws_profile):
    session = Session(profile_name=aws_profile)
    return session
Example #19

import lob
from boto3.session import Session
from botocore.exceptions import ClientError
from boto3.dynamodb.conditions import Key, Attr
import json
import requests
import datetime
from config import *

# API keys are stored in config.py
lob.api_key = LOB_API_KEY
session = Session(
    aws_access_key_id=AWS_ACCESS_KEY,
    aws_secret_access_key=AWS_SECRET_KEY,
    region_name=REGION_NAME
)

dynamodb = session.resource('dynamodb')
table = dynamodb.Table('ThryvePostcards')

#all postcards will be sent from Thryve
THRYVE_INSIDE = lob.Address.create(
    name='Thryve Inside',
    address_line1='4701 Patrick Henry Drive',
    address_city='Santa Clara',
    address_state='CA',
    address_country='US',
    address_zip='95054'
)
Example #20
def s3_download(filename):
    s3_client = Session().client('s3')
    return s3_client.get_object(Bucket='downloads.scylladb.com', Key=filename)['Body'].read()
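A minimal usage sketch for the helper above (the object key is hypothetical):

data = s3_download('relnotes/scylla-release-notes.txt')
with open('scylla-release-notes.txt', 'wb') as f:
    f.write(data)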
Example #21
        },
        'reporting': {
            'handlers': LOGGING_HANDLERS,
            'level': KOKU_LOGGING_LEVEL,
        },
        'reporting_common': {
            'handlers': LOGGING_HANDLERS,
            'level': KOKU_LOGGING_LEVEL,
        },
    },
}

if CW_AWS_ACCESS_KEY_ID:
    NAMESPACE = 'unknown'
    BOTO3_SESSION = Session(aws_access_key_id=CW_AWS_ACCESS_KEY_ID,
                            aws_secret_access_key=CW_AWS_SECRET_ACCESS_KEY,
                            region_name=CW_AWS_REGION)
    try:
        with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace",
                  "r") as f:
            NAMESPACE = f.read()
    except Exception:  # pylint: disable=W0703
        pass
    WATCHTOWER_HANDLER = {
        'level': KOKU_LOGGING_LEVEL,
        'class': 'watchtower.CloudWatchLogHandler',
        'boto3_session': BOTO3_SESSION,
        'log_group': CW_LOG_GROUP,
        'stream_name': NAMESPACE,
        'formatter': LOGGING_FORMATTER,
    }
Example #22
def main():
    logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                        level='ERROR')
    logging.getLogger('botocore').setLevel(logging.CRITICAL)

    parser = argparse.ArgumentParser(
        description='Script to pull the EC2 spot price history out '
        'of AWS and push it into Graphite.')
    parser.add_argument(
        '--aws-access-key-id',
        dest='aws_access_key_id',
        help='Specify a value here if you want to use a different '
        'AWS_ACCESS_KEY_ID than configured in the AWS CLI.')
    parser.add_argument(
        '--aws-secret-access-key',
        dest='aws_secret_access_key',
        help='Specify a value here if you want to use a different '
        'AWS_SECRET_ACCESS_KEY than configured in the AWS CLI.')
    parser.add_argument(
        '--profile',
        dest='profile_name',
        help='The AWS CLI profile to use. Defaults to the default profile.')
    parser.add_argument('--region',
                        dest='region_name',
                        default='us-east-1',
                        help='The AWS region to connect to. Defaults to '
                        'us-east-1.')
    parser.add_argument('--interval',
                        dest='interval',
                        default=1,
                        required=False,
                        type=int,
                        help='The interval in minutes back from now to gather '
                        'prices for. Defaults to 1 minute.')
    parser.add_argument(
        '--products',
        dest='product_descriptions',
        default='Linux/UNIX (Amazon VPC), Windows (Amazon VPC)',
        required=False,
        help='A comma separated list of products to fetch. '
        'Defaults to "Linux/UNIX (Amazon VPC), Windows (Amazon VPC)"')
    parser.add_argument(
        '--log-level',
        dest='log_level',
        default='INFO',
        required=False,
        help='The log level to log messages with. Defaults to INFO.')
    parser.add_argument(
        '--graphite-host',
        dest='graphite_host',
        default='localhost',
        required=False,
        help='The graphite host to send the metrics to. Defaults to localhost.'
    )
    parser.add_argument(
        '--graphite-port',
        dest='graphite_port',
        default=2004,
        required=False,
        type=int,
        help='The graphite port to send the metrics to. Defaults to 2004.')
    parser.add_argument('--graphite-prefix',
                        dest='graphite_prefix',
                        default='aws.ec2.spot-price',
                        required=False,
                        help='A prefix to prepend to the metric name. Defaults '
                        'to "aws.ec2.spot-price".')
    args = parser.parse_args()

    logging.getLogger().setLevel(args.log_level)

    session_args = {
        key: value
        for key, value in vars(args).items() if key in [
            'aws_access_key_id', 'aws_secret_access_key', 'profile_name',
            'region_name'
        ]
    }
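    # Arguments the user did not set arrive here as None, which Session() treats as "use the default".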
    try:
        session = Session(**session_args)
        ec2 = session.client('ec2')
    except BotoCoreError as exc:
        logging.error("Connecting to the EC2 API failed: %s", exc)
        sys.exit(1)
    else:
        logging.debug("Connection to EC2 API established.")

    product_descriptions = [
        product.strip() for product in args.product_descriptions.split(',')
    ]
    metrics = get_spot_prices(ec2, args.interval, args.graphite_prefix,
                              product_descriptions)
    send_to_graphite(metrics, args.graphite_host, args.graphite_port)