Beispiel #1
0
def build_vpc(aws_creds, region, cidr=""):
    """Create a VPC with the given CIDR block and tag it 'Meetup'.

    Returns the new VPC id; raises ApiException when the CreateVpc call
    does not return HTTP 200.
    """
    print('Building VPC for %s network' % str(cidr))
    session = botocore.session.get_session()
    session.set_credentials(aws_creds.access_key, aws_creds.secret_key)
    ec2 = session.get_service('ec2')
    endpoint = ec2.get_endpoint(region)

    create_vpc = ec2.get_operation('CreateVpc')
    http_response, response_data = create_vpc.call(endpoint, CidrBlock=cidr)
    p3.pprint(str(http_response.status_code) + " - " + http_response.reason)
    p3.pprint(response_data)

    if http_response.status_code != 200:
        raise ApiException

    vpc_id = response_data['Vpc']['VpcId']

    # Tag the new VPC with a human-readable name (response intentionally
    # unchecked, matching the original best-effort behavior).
    create_tags = ec2.get_operation('CreateTags')
    http_response, response_data = create_tags.call(
        endpoint,
        Resources=[vpc_id],
        Tags=[{"Key": "Name", "Value": 'Meetup'}],
    )

    return vpc_id
Beispiel #2
0
    def connect_to_host(cls, host='localhost', port=8000, is_secure=False,
                        session=None, access_key=None, secret_key=None,
                        **kwargs):
        """
        Connect to a specific host.

        Deprecated; use :meth:`~.connect` instead.

        Parameters
        ----------
        host : str, optional
            Address of the host (default 'localhost')
        port : int, optional
            Connect to the host on this port (default 8000)
        is_secure : bool, optional
            Enforce https connection (default False)
        session : :class:`~botocore.session.Session`, optional
            Session to reuse; a fresh one is created when omitted
        access_key : str, optional
            Access key set only when a new session is created
        secret_key : str, optional
            Secret key set only when a new session is created
        **kwargs : dict
            Extra keyword arguments forwarded to the constructor

        """
        if session is None:
            session = botocore.session.get_session()
            if access_key is not None:
                session.set_credentials(access_key, secret_key)
        service = session.get_service('dynamodb')
        endpoint = service.get_endpoint(
            'local',
            endpoint_url="http://%s:%d" % (host, port),
            is_secure=is_secure)
        return cls(service, endpoint, **kwargs)
def test_xml_parsing():
    """Yield one check per XML fixture: parse it and compare the result
    against the saved JSON expectation (nose-style test generator)."""
    for dp in ['responses', 'errors']:
        data_path = os.path.join(os.path.dirname(__file__), 'xml')
        data_path = os.path.join(data_path, dp)
        session = botocore.session.get_session()
        xml_files = glob.glob('%s/*.xml' % data_path)
        # The service name is the prefix of each fixture file name.
        service_names = set()
        for fn in xml_files:
            service_names.add(os.path.split(fn)[1].split('-')[0])
        for service_name in service_names:
            service = session.get_service(service_name)
            service_xml_files = glob.glob('%s/%s-*.xml' %
                                          (data_path, service_name))
            for xmlfile in service_xml_files:
                dirname, filename = os.path.split(xmlfile)
                basename = os.path.splitext(filename)[0]
                jsonfile = os.path.join(dirname, basename + '.json')
                sn, opname = basename.split('-', 1)
                opname = opname.split('#')[0]
                operation = service.get_operation(opname)
                r = XmlResponse(session, operation)
                # Context managers close the files even if parsing or
                # json.load raises (the original leaked handles on error).
                with open(xmlfile) as fp:
                    xml = fp.read()
                r.parse(xml, 'utf-8')
                save_jsonfile(jsonfile, r)
                with open(jsonfile) as fp:
                    data = json.load(fp)
                yield check_dicts, xmlfile, r.get_value(), data
def test_json_parsing():
    """Yield one check per JSON input fixture: parse it and compare the
    result against the expected output file (nose-style generator)."""
    input_path = os.path.join(os.path.dirname(__file__), 'json')
    input_path = os.path.join(input_path, 'inputs')
    output_path = os.path.join(os.path.dirname(__file__), 'json')
    output_path = os.path.join(output_path, 'outputs')
    session = botocore.session.get_session()
    jsonfiles = glob.glob('%s/*.json' % input_path)
    # The service name is the prefix of each fixture file name.
    service_names = set()
    for fn in jsonfiles:
        service_names.add(os.path.split(fn)[1].split('-')[0])
    for service_name in service_names:
        service = session.get_service(service_name)
        service_input_files = glob.glob('%s/%s-*.json' %
                                        (input_path, service_name))
        for inputfile in service_input_files:
            dirname, filename = os.path.split(inputfile)
            outputfile = os.path.join(output_path, filename)
            basename = os.path.splitext(filename)[0]
            sn, opname = basename.split('-', 1)
            operation = service.get_operation(opname)
            r = JSONResponse(session, operation)
            with open(inputfile, 'rb') as fp:
                jsondoc = fp.read()
            r.parse(jsondoc, 'utf-8')
            save_jsonfile(outputfile, r)
            # Use a context manager here too (the original leaked the
            # output handle if json.load raised).
            with open(outputfile) as fp:
                data = json.load(fp)
            yield check_dicts, inputfile, r.get_value(), data
def main():
    """Send a test email through SES and print the raw responses.

    Fix: converted Python 2 print statements to the print() function so
    the block runs under Python 3 like the rest of the file.
    """
    print()
    print("sending email via ses - initial parameters")

    subject     = "SES test email"
    region      = 'us-east-1'  # change this to your region

    # To test, verify an email address in SES and use it for both the
    # destination and the source (from) address.
    destination = {'ToAddresses': ['*****@*****.**']}
    source      = '*****@*****.**'  # this is the from address
    msgbody     = {'Subject': {'Data': 'this is a subject'}, 'Body': {'Text': {'Data': 'This is the body' + "\nand more lines"}}}

    print("setting up the session and sending the email")
    session = botocore.session.get_session()
    ses = session.get_service('ses')
    operation = ses.get_operation('SendEmail')
    endpoint = ses.get_endpoint(region)

    http_response, response_data = operation.call(endpoint,
                                                  source=source,
                                                  destination=destination,
                                                  message=msgbody)

    print("the results returned from the SendEmail operation")
    print(http_response)
    print(response_data)
def test_xml_parsing():
    """Yield a check for every XML fixture under xml/responses and
    xml/errors, comparing the parsed value to its saved JSON twin."""
    for dp in ['responses', 'errors']:
        data_path = os.path.join(os.path.dirname(__file__), 'xml')
        data_path = os.path.join(data_path, dp)
        session = botocore.session.get_session()
        xml_files = glob.glob('%s/*.xml' % data_path)
        # Fixture file names start with the service name.
        service_names = set()
        for fn in xml_files:
            service_names.add(os.path.split(fn)[1].split('-')[0])
        for service_name in service_names:
            service = session.get_service(service_name)
            service_xml_files = glob.glob('%s/%s-*.xml' % (data_path,
                                                           service_name))
            for xmlfile in service_xml_files:
                dirname, filename = os.path.split(xmlfile)
                basename = os.path.splitext(filename)[0]
                jsonfile = os.path.join(dirname, basename + '.json')
                sn, opname = basename.split('-', 1)
                opname = opname.split('#')[0]
                operation = service.get_operation(opname)
                r = XmlResponse(session, operation)
                # with-blocks close the files even on a parse error
                # (the original leaked handles in that case).
                with open(xmlfile) as fp:
                    xml = fp.read()
                r.parse(xml, 'utf-8')
                save_jsonfile(jsonfile, r)
                with open(jsonfile) as fp:
                    data = json.load(fp)
                yield check_dicts, xmlfile, r.get_value(), data
    def retrieve_notifications(self, max_notifications=1):
        """Attempts to retrieve notifications from the provided SQS queue.

        Long-polls the queue for up to 20 seconds and returns a list of
        ScalingNotification objects; messages that fail to parse are
        deleted from the queue.
        """
        session = botocore.session.get_session()
        sqs = session.get_service("sqs")
        receive = sqs.get_operation("ReceiveMessage")
        http_response, response_data = receive.call(sqs.get_endpoint(self._region),
                                                    queue_url=self._queue_url,
                                                    wait_time_seconds=20,
                                                    max_number_of_messages=max_notifications)

        # Swallow up any errors/issues, logging them out.
        # Fixes: 'x not in y' idiom (was 'not x in y') and lazy logging
        # arguments (was eager %-formatting), matching log.exception below.
        if http_response.status_code != 200 or "Messages" not in response_data:
            log.error(u"Failed to retrieve messages from queue %s with status_code %s: %s",
                      self._queue_url, http_response.status_code, response_data)
            return []

        notifications = []
        for msg in response_data.get("Messages", []):
            # Construct a message that we can parse into events.
            message = Message(self._queue_url, self._region, msg)
            try:
                notifications.append(ScalingNotification(message))
            except Exception:
                log.exception(u"Invalid message received; will delete from queue: %s", msg)
                message.delete()

        return notifications
Beispiel #8
0
    def connect_to_region(cls, region, session=None, access_key=None,
                          secret_key=None, **kwargs):
        """
        Connect to an AWS region.

        Deprecated; prefer :meth:`~.connect`.

        Parameters
        ----------
        region : str
            Name of an AWS region
        session : :class:`~botocore.session.Session`, optional
            Session to reuse; a fresh one is created when omitted
        access_key : str, optional
            Access key set only when a new session is created
        secret_key : str, optional
            Secret key set only when a new session is created
        **kwargs : dict
            Extra keyword arguments forwarded to the constructor

        """
        if session is None:
            session = botocore.session.get_session()
            if access_key is not None:
                session.set_credentials(access_key, secret_key)
        dynamodb = session.get_service('dynamodb')
        endpoint = dynamodb.get_endpoint(region)
        return cls(dynamodb, endpoint, **kwargs)
Beispiel #9
0
def main():
    """Stop every EC2 instance currently in the 'running' state.

    Exits with status 2 if either the describe or the stop call fails.
    """
    session = botocore.session.Session()
    session.set_credentials(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    ec2 = session.get_service('ec2')
    endpoint = ec2.get_endpoint(AWS_DEFAULT_REGION)
    op_din = ec2.get_operation('DescribeInstances')
    op_stop = ec2.get_operation('StopInstances')

    running_filter = [{"Name": "instance-state-name", "Values": ["running"]}]
    http_response, reservation_data = op_din.call(endpoint,
                                                  filters=running_filter)
    if not http_response.ok:
        print(reservation_data['Errors'])
        sys.exit(2)
    reservations = reservation_data['Reservations']

    # Flatten the reservations into a single list of instances.
    instances = []
    for reservation in reservations:
        instances += reservation['Instances']

    print("Found %i instances in running state" % len(instances))
    instance_ids = [instance['InstanceId'] for instance in instances]

    http_response, data = op_stop.call(endpoint, instance_ids=instance_ids)
    if not http_response.ok:
        print(data['Errors'])
        sys.exit(2)

    for instance in data['StoppingInstances']:
        print("Instance %s now stopping" % instance['InstanceId'])
    def retrieve_events(self, max_events=1):
        """Attempts to retrieve events from the provided SQS queue.

        Long-polls the queue for up to 20 seconds and returns a list of
        ResourceEvent objects; messages that fail to parse are deleted
        from the queue.
        """
        session = botocore.session.get_session()
        sqs = session.get_service("sqs")
        receive = sqs.get_operation("ReceiveMessage")
        http_response, response_data = receive.call(
            sqs.get_endpoint(self._region),
            queue_url=self._queue_url,
            wait_time_seconds=20,
            max_number_of_messages=max_events)

        # Swallow up any errors/issues, logging them out.
        # Fixes: 'x not in y' idiom (was 'not x in y') and lazy logging
        # arguments (was eager %-formatting), matching log.exception below.
        if http_response.status_code != 200 or "Messages" not in response_data:
            log.error(
                u"Failed to retrieve messages from queue %s with status_code %s: %s",
                self._queue_url, http_response.status_code, response_data)
            return []

        events = []
        for msg in response_data.get("Messages", []):
            # Construct a message that we can parse into events.
            message = Message(self._queue_url, self._region, msg)

            try:
                # Try to parse our message as a custom resource event
                event = ResourceEvent(message)

                events.append(event)
            except Exception:
                log.exception(
                    u"Invalid message received; will delete from queue: %s",
                    msg)
                message.delete()

        return events
def test_response_parsing():
    """Yield one check per XML fixture in data/, bootstrapping a missing
    JSON expectation file from the parsed value when needed."""
    data_path = os.path.join(os.path.dirname(__file__), 'data')
    session = botocore.session.get_session()
    xml_files = glob.glob('%s/*.xml' % data_path)
    # The service name is the prefix of each fixture file name.
    service_names = set()
    for fn in xml_files:
        service_names.add(os.path.split(fn)[1].split('-')[0])
    for service_name in service_names:
        service = session.get_service(service_name)
        service_xml_files = glob.glob('%s/%s-*.xml' % (data_path, service_name))
        for xmlfile in service_xml_files:
            dirname, filename = os.path.split(xmlfile)
            basename = os.path.splitext(filename)[0]
            jsonfile = os.path.join(dirname, basename + '.json')
            sn, opname = basename.split('-', 1)
            operation = service.get_operation(opname)
            r = XmlResponse(operation)
            # Context managers close the files even when parsing or
            # json.load raises (the original leaked handles on error).
            with open(xmlfile) as fp:
                xml = fp.read()
            r.parse(xml)
            # This is a little convenience when creating new tests.
            # Drop the XML file into the data directory and, if no JSON
            # file is present, this code creates it and dumps the parsed
            # value into it.  You still need to validate that the JSON
            # is correct, but it makes it easy to bootstrap more tests.
            if not os.path.isfile(jsonfile):
                with open(jsonfile, 'w') as fp:
                    json.dump(r.get_value(), fp, indent=4)
            with open(jsonfile) as fp:
                data = json.load(fp)
            yield check_dicts, xmlfile, r.get_value(), data
def test_json_parsing():
    """Yield a check per JSON input fixture, comparing the parsed value
    against the matching file under json/outputs."""
    input_path = os.path.join(os.path.dirname(__file__), 'json')
    input_path = os.path.join(input_path, 'inputs')
    output_path = os.path.join(os.path.dirname(__file__), 'json')
    output_path = os.path.join(output_path, 'outputs')
    session = botocore.session.get_session()
    jsonfiles = glob.glob('%s/*.json' % input_path)
    # Fixture file names start with the service name.
    service_names = set()
    for fn in jsonfiles:
        service_names.add(os.path.split(fn)[1].split('-')[0])
    for service_name in service_names:
        service = session.get_service(service_name)
        service_input_files = glob.glob('%s/%s-*.json' % (input_path,
                                                          service_name))
        for inputfile in service_input_files:
            dirname, filename = os.path.split(inputfile)
            outputfile = os.path.join(output_path, filename)
            basename = os.path.splitext(filename)[0]
            sn, opname = basename.split('-', 1)
            operation = service.get_operation(opname)
            r = JSONResponse(session, operation)
            with open(inputfile, 'rb') as fp:
                jsondoc = fp.read()
            r.parse(jsondoc, 'utf-8')
            save_jsonfile(outputfile, r)
            # Use a context manager here too (the original leaked the
            # output handle if json.load raised).
            with open(outputfile) as fp:
                data = json.load(fp)
            yield check_dicts, inputfile, r.get_value(), data
Beispiel #13
0
def build_elb(aws_creds, region, lb_name="", subnets=None, elb_sg_id=""):
    """Create an internal ELB listening on HTTP port 80.

    Returns the load balancer's DNS name; raises ApiException when the
    CreateLoadBalancer call does not return HTTP 200.

    Fix: ``subnets`` previously defaulted to a mutable list (``[]``),
    which is shared across calls; ``None`` plus an in-body default is the
    safe, backward-compatible equivalent.
    """
    if subnets is None:
        subnets = []
    print('Building elb')
    session = botocore.session.get_session()
    session.set_credentials(aws_creds.access_key, aws_creds.secret_key)
    elb = session.get_service('elb')
    operation = elb.get_operation('CreateLoadBalancer')
    endpoint = elb.get_endpoint(region)
    http_response, response_data = operation.call(endpoint,
                                                  LoadBalancerName=lb_name,
                                                  Listeners=[
                                                      {"Protocol": 'http',
                                                       "LoadBalancerPort": 80,
                                                       "InstanceProtocol": 'http',
                                                       "InstancePort": 80}],
                                                  Subnets=subnets,
                                                  SecurityGroups=[elb_sg_id],
                                                  Scheme='internal')

    p3.pprint(str(http_response.status_code) + " - " + http_response.reason)
    p3.pprint(response_data)

    if http_response.status_code != 200:
        raise(ApiException)

    elb_name = response_data['DNSName']
    return elb_name
def main():
    """Run a few DynamoDB examples: DescribeTable, GetItem and a bulk
    PutItem loop with throttling delays.

    Fix: converted Python 2 print statements to the print() function so
    the block runs under Python 3 like the rest of the file.
    """
    print("some dynamoDb examples")

    session = botocore.session.get_session()
    db = session.get_service('dynamodb')

    print("DescribeTable")
    operation = db.get_operation('DescribeTable')
    endpoint = db.get_endpoint('us-east-1')  # change this to your region
    tableName = 'myTable'  # change this to your dynamoDb test table name
    http_response, response_data = operation.call(endpoint,
                                              table_name=tableName)
    print()
    print("results of DescribeTable for tableName:" + tableName)
    print(http_response)
    print(response_data)

    getops = db.get_operation('GetItem')
    # assumes a numeric hash key as the primaryKey and named customerId
    # you can create this item in the aws console if you want
    getItem = {u'customerId': {"N": "1"}}
    http_response, response_data = getops.call(endpoint, table_name='myTable', key=getItem)

    print()
    print(http_response)
    print("getItem Results: " + str(response_data))

    print()
    print("Insert a few hundred Items (records) in " + tableName)

    putops = db.get_operation('PutItem')

    i = 0
    x = {}
    y = {}
    while i < 200:
        print(i)
        string = "this is a made up key string for itemId=" + str(i)
        putItem = {u'customerId': {"N": i}}
        x['N'] = str(i)
        y['S'] = string
        putItem[u'customerId'] = x
        putItem[u'astringattribute'] = y
        # 'N' tells AWS that the string should be treated as a number
        # 'S' treat the string as a string
        # 'SS' treat the string as a set of strings (like an array)
        putItem[u'setstringattribute'] = {u'SS': (u"x", u"y")}

        # uncomment the next line to see what we are inserting into DynamoDb
        #print(putItem)

        http_response, response_data = putops.call(endpoint, table_name='myTable', item=putItem)
        i = i + 1

        # dynamoDb throttles inserts so you may see them slow down in your
        # terminal window; add a little delay into the inserts ourselves
        if i % 10 == 0:
            time.sleep(2)
Beispiel #15
0
def init_botocore_service(name, region):
    """Initialize the proper service with botocore.

    Returns the ``(service, endpoint)`` pair for *name* in *region*.
    """
    session = botocore.session.get_session()
    svc = session.get_service(name)
    return svc, svc.get_endpoint(region)
Beispiel #16
0
def init_botocore_service(name, region):
    """Look up a botocore service and its regional endpoint.

    Returns a ``(service, endpoint)`` tuple for *name* in *region*.
    """
    botocore_session = botocore.session.get_session()
    looked_up_service = botocore_session.get_service(name)
    regional_endpoint = looked_up_service.get_endpoint(region)
    return looked_up_service, regional_endpoint
Beispiel #17
0
 def test_get_endpoint_with_no_region(self):
     """A global-endpoint service such as iam resolves without a region."""
     self.environ["AWS_ACCESS_KEY_ID"] = "access_key"
     self.environ["AWS_SECRET_ACCESS_KEY"] = "secret_key"
     session = botocore.session.get_session()
     iam = session.get_service("iam")
     resolved = iam.get_endpoint()
     self.assertEqual(resolved.host, "https://iam.amazonaws.com/")
 def test_get_session_token(self):
     """GetSessionToken without credentials raises NoCredentialsError."""
     session = botocore.session.get_session()
     op = session.get_service('sts').get_operation('GetSessionToken')
     endpoint = self.get_mocked_endpoint()
     with self.assertRaises(NoCredentialsError):
         endpoint.make_request(op, {})
Beispiel #19
0
def test_can_make_request():
    """Yield one smoke-test call per configured (service, operation)."""
    session = botocore.session.get_session()
    for service_name, operations in SMOKE_TESTS.items():
        service = session.get_service(service_name)
        endpoint = service.get_endpoint(REGION)
        for operation_name, kwargs in operations.items():
            yield _make_call, service, endpoint, operation_name, kwargs
Beispiel #20
0
 def test_get_session_token(self):
     """Requesting a session token with no credentials must fail."""
     session = botocore.session.get_session()
     sts = session.get_service('sts')
     operation = sts.get_operation('GetSessionToken')
     endpoint = self.get_mocked_endpoint()
     with self.assertRaises(NoCredentialsError):
         endpoint.make_request(operation, {})
Beispiel #21
0
 def test_get_session_token(self):
     """With credentials patched out, GetSessionToken raises
     NoCredentialsError."""
     # The patch's return value was never used, so don't bind it
     # (the original assigned an unused `mock_fn`).
     with self.get_credentials_patch:
         session = botocore.session.get_session()
         sns = session.get_service("sts")
         op = sns.get_operation("GetSessionToken")
         params = {}
         endpoint = self.get_mocked_endpoint()
         self.assertRaises(NoCredentialsError, endpoint.make_request, op, params)
Beispiel #22
0
def test_can_make_request():
    """Smoke tests: yield a _make_call invocation for every operation
    configured in SMOKE_TESTS."""
    session = botocore.session.get_session()
    for service_name in SMOKE_TESTS:
        service = session.get_service(service_name)
        endpoint = service.get_endpoint(REGION)
        for operation_name in SMOKE_TESTS[service_name]:
            yield (_make_call, service, endpoint, operation_name,
                   SMOKE_TESTS[service_name][operation_name])
Beispiel #23
0
 def put_object(self, bucket, key, content):
     """Create *bucket*, upload *content* under *key*, and register
     cleanup handlers that delete the object and the bucket afterwards."""
     session = botocore.session.get_session()
     s3 = session.get_service("s3")
     endpoint = s3.get_endpoint("us-east-1")
     create_bucket = s3.get_operation("CreateBucket")
     http, response = create_bucket.call(endpoint, bucket=bucket)
     # Brief pause so the freshly created bucket is usable.
     time.sleep(5)
     self.addCleanup(s3.get_operation("DeleteBucket").call, endpoint, bucket=bucket)
     upload = s3.get_operation("PutObject")
     http, response = upload.call(endpoint, bucket=bucket, key=key, body=content)
     self.addCleanup(s3.get_operation("DeleteObject").call, endpoint, bucket=bucket, key=key)
Beispiel #24
0
def gen_s3_obj(method):
    """Return ``[endpoint, operation]`` for the named S3 operation,
    authenticated with credentials from the shared config."""
    config = common.get_config()
    session = botocore.session.get_session()
    session.set_credentials(config['access_key'], config['secret_key'])

    service = session.get_service('s3')
    operation = service.get_operation(method)
    endpoint = service.get_endpoint(config['region'])
    return [endpoint, operation]
Beispiel #25
0
def gen_s3_obj(method):
    """Build the S3 ``[endpoint, operation]`` pair for *method* using
    credentials taken from the common config."""
    config = common.get_config()
    session = botocore.session.get_session()
    session.set_credentials(config["access_key"], config["secret_key"])

    s3_service = session.get_service("s3")
    op = s3_service.get_operation(method)
    ep = s3_service.get_endpoint(config["region"])
    return [ep, op]
Beispiel #26
0
 def test_assume_role_with_saml(self):
     """AssumeRoleWithSAML is unsigned and must not add 'auth' params."""
     session = botocore.session.get_session()
     sns = session.get_service('sts')
     op = sns.get_operation('AssumeRoleWithSAML')
     # SAML assume-role requests carry no signature.
     # (assertIsNone replaces assertEqual(x, None) per unittest idiom.)
     self.assertIsNone(op.signature_version)
     endpoint = self.get_mocked_endpoint()
     params = op.build_parameters(principal_arn='principal_arn',
                                  role_arn='role_arn',
                                  saml_assertion='saml_assertion')
     endpoint.make_request(op, params)
     self.assertNotIn('auth', self.called_params)
     self.reset_called_params()
def main():
    """Upload a local file to S3, inspect its ACL, make it public-read
    and print the resulting URL.

    Fixes: converted Python 2 print statements to print() (the rest of
    the file is mostly Python 3) and wrapped the upload file handle in a
    context manager so it is closed even if the call fails.
    """
    print()
    print("Putting a file in s3 - initial parameters")

    bucket     = 'yourbucketname'  # change this to your bucket name
    key        = 'xyzzy'  # a subfolder under the bucket
    filename   = 'testfile.txt'  # the file we will put into S3
    region     = 'us-east-1'  # change this to your region
    acl        = 'public-read'  # public-read lets us access the file via a url

    print()
    print('         region: ' + region)
    print('         bucket: ' + bucket)
    print('key (subfolder): ' + key)
    print('       filename: ' + './' + filename)
    print('            acl: ' + acl)

    session = botocore.session.get_session()
    s3 = session.get_service('s3')
    operation = s3.get_operation('PutObject')
    endpoint = s3.get_endpoint(region)

    print()
    print("uploading the file to s3")

    with open('./' + filename, 'rb') as fp:
        http_response, response_data = operation.call(endpoint,
                                                  bucket=bucket,
                                                  key=key + '/' + filename,
                                                  body=fp)
    print(http_response)
    print(response_data)
    print()
    print("getting s3 object properties of file we just uploaded")
    operation = s3.get_operation('GetObjectAcl')
    http_response, response_data = operation.call(endpoint,
                                                  bucket=bucket,
                                                  key=key + '/' + filename)
    print(http_response)
    print(response_data)
    print()
    print("setting the acl to public-read")
    operation = s3.get_operation('PutObjectAcl')
    http_response, response_data = operation.call(endpoint,
                                                  bucket=bucket,
                                                  key=key + '/' + filename,
                                                  acl=acl)
    print(http_response)
    print(response_data)
    print()
    print("The url of the object is:")
    print()
    print('http://'+bucket+'.s3.amazonaws.com/'+ key + '/' + filename)
 def test_assume_role_with_saml(self):
     """SAML assume-role requests are unsigned and add no 'auth' param."""
     session = botocore.session.get_session()
     sns = session.get_service('sts')
     op = sns.get_operation('AssumeRoleWithSAML')
     # assertIsNone replaces assertEqual(x, None) per unittest idiom.
     self.assertIsNone(op.signature_version)
     endpoint = self.get_mocked_endpoint()
     params = op.build_parameters(principal_arn='principal_arn',
                                  role_arn='role_arn',
                                  saml_assertion='saml_assertion')
     endpoint.make_request(op, params)
     self.assertNotIn('auth', self.called_params)
     self.reset_called_params()
Beispiel #29
0
 def put_object(self, bucket, key, content):
     """Create the bucket, put *content* at *key*, and schedule cleanup
     of both the object and the bucket."""
     session = botocore.session.get_session()
     service = session.get_service('s3')
     endpoint = service.get_endpoint('us-east-1')
     create = service.get_operation('CreateBucket')
     http, response = create.call(endpoint, bucket=bucket)
     # Brief pause so the freshly created bucket is usable.
     time.sleep(5)
     delete_bucket = service.get_operation('DeleteBucket')
     self.addCleanup(delete_bucket.call, endpoint, bucket=bucket)
     upload = service.get_operation('PutObject')
     http, response = upload.call(endpoint, bucket=bucket, key=key,
                                  body=content)
     delete_object = service.get_operation('DeleteObject')
     self.addCleanup(delete_object.call, endpoint, bucket=bucket, key=key)
Beispiel #30
0
 def test_assume_role_with_saml(self):
     """With credentials patched out, AssumeRoleWithSAML is still unsigned
     and adds no 'auth' parameter."""
     # The patch's return value was never used, so don't bind it
     # (the original assigned an unused `mock_fn`).
     with self.get_credentials_patch:
         session = botocore.session.get_session()
         sns = session.get_service("sts")
         op = sns.get_operation("AssumeRoleWithSAML")
         # assertIsNone replaces assertEqual(x, None) per unittest idiom.
         self.assertIsNone(op.signature_version)
         endpoint = self.get_mocked_endpoint()
         params = op.build_parameters(
             principal_arn="principal_arn", role_arn="role_arn", saml_assertion="saml_assertion"
         )
         endpoint.make_request(op, params)
         self.assertNotIn("auth", self.called_params)
         self.reset_called_params()
Beispiel #31
0
def resource_from_arn(arn, data):
    """Instantiate the resource class matching an ARN.

    Splits the ARN into its components, resolves the class registered
    under 'aws.<service>.<resource_type>' and constructs it with *data*.

    Fix: split the resource part only on the FIRST ':' or '/' — resource
    ids may themselves contain the separator, and the original two-value
    unpacking raised ValueError on such ARNs.  Also bind resource_id in
    the no-separator branch so it is never left undefined.
    """
    session = botocore.session.get_session()
    parts = ArnComponents(*arn.split(':', 6))
    service = session.get_service(parts.service)
    if ':' in parts.resource:
        resource_type, resource_id = parts.resource.split(':', 1)
    elif '/' in parts.resource:
        resource_type, resource_id = parts.resource.split('/', 1)
    else:
        resource_type = parts.resource
        resource_id = None  # ARN carries no separate id component
    endpoint = Endpoint(service, parts.region, parts.account)
    resource_path = '.'.join(['aws', parts.service, resource_type])
    resource_cls = find_resource_class(resource_path)
    return resource_cls(endpoint, data)
Beispiel #32
0
def resource_from_arn(arn, data):
    """Build the resource object described by an ARN.

    Resolves the class registered at 'aws.<service>.<resource_type>' and
    constructs it with *data* and an Endpoint for the ARN's region/account.

    Fix: use maxsplit=1 when separating resource type from id — ids can
    contain ':' or '/' themselves, and the plain split made the two-value
    unpacking raise ValueError.  resource_id is also bound in the
    no-separator branch instead of being left undefined.
    """
    session = botocore.session.get_session()
    parts = ArnComponents(*arn.split(':', 6))
    service = session.get_service(parts.service)
    if ':' in parts.resource:
        resource_type, resource_id = parts.resource.split(':', 1)
    elif '/' in parts.resource:
        resource_type, resource_id = parts.resource.split('/', 1)
    else:
        resource_type = parts.resource
        resource_id = None  # ARN carries no separate id component
    endpoint = Endpoint(service, parts.region, parts.account)
    resource_path = '.'.join(['aws', parts.service, resource_type])
    resource_cls = find_resource_class(resource_path)
    return resource_cls(endpoint, data)
Beispiel #33
0
	def push_file_to_s3(file):
		"""Upload *file* (a Django-style uploaded file) to S3 under the
		'croomer_production' prefix, make it public-read and return its URL.

		Fixes: the upload handle is now opened with a context manager so it
		is closed even if the call raises (the original leaked it), and the
		large blocks of commented-out debug prints were removed.

		NOTE(review): the parameter shadows the builtin name ``file``; kept
		unchanged for backward compatibility with existing callers.
		"""
		bucket = IMS.settings.BUCKET_NAME  # change this to your bucket name
		key = 'croomer_production'  # a subfolder under the bucket
		filelocation = IMS.settings.MEDIA_ROOT + '/' + file.name  # the file we will put into S3
		filename = file.name
		region = 'us-west-2'  # change this to your region
		acl = 'public-read'  # public-read lets us access the file via a url

		session = botocore.session.get_session()
		s3 = session.get_service('s3')
		operation = s3.get_operation('PutObject')
		endpoint = s3.get_endpoint(region)

		# Upload the file; the with-block guarantees the handle is closed.
		with open('./' + filelocation, 'rb') as fp:
			http_response, response_data = operation.call(endpoint,
													  bucket=bucket,
													  key=key + '/' + filename,
													  body=fp)

		# Fetch the object's ACL (result unused, kept for parity with the
		# original behavior).
		operation = s3.get_operation('GetObjectAcl')
		http_response, response_data = operation.call(endpoint,
													  bucket=bucket,
													  key=key + '/' + filename)

		# Make the object publicly readable.
		operation = s3.get_operation('PutObjectAcl')
		http_response, response_data = operation.call(endpoint,
													  bucket=bucket,
													  key=key + '/' + filename,
													  acl=acl)

		return 'http://'+bucket+'.s3.amazonaws.com/'+ key + '/' + filename
Beispiel #34
0
def _toggle_term_protect(name, enabled):
    '''
    Toggle termination protection on a node.

    name -- cloud node name to look up
    enabled -- True to enable protection, False to disable it

    Returns a human-readable status message (also logged).
    '''
    # region is required for all boto queries
    region = get_location(None)

    # init botocore with credentials from the master config
    session = botocore.session.get_session()
    session.set_credentials(
        access_key=__opts__['AWS.id'],
        secret_key=__opts__['AWS.key'],
    )

    service = session.get_service('ec2')
    endpoint = service.get_endpoint(region)

    # get the instance-id for the supplied node name
    conn = get_conn(location=region)
    node = get_node(conn, name)

    params = {
        'instance_id': node.id,
        'attribute': 'disableApiTermination',
        'value': 'true' if enabled else 'false',
    }

    # flip the attribute on the instance
    operation = service.get_operation('modify-instance-attribute')
    http_response, response_data = operation.call(endpoint, **params)

    if http_response.status_code == 200:
        # IDIOM FIX: replaced fragile `cond and a or b` with a conditional
        # expression (same result here, but the and/or trick breaks for
        # falsy values).
        msg = (
            'Termination protection successfully {0} on {1}'.format(
                'enabled' if enabled else 'disabled',
                name
            )
        )
        log.info(msg)
        return msg
    else:
        msg = (
            'Bad response from AWS: {0}'.format(
                http_response.status_code
            )
        )
        log.error(msg)
        return msg
Beispiel #35
0
def _toggle_term_protect(name, enabled):
    '''
    Toggle termination protection on a node.

    name -- cloud node name to look up
    enabled -- True to enable protection, False to disable it

    Returns a human-readable status message (also logged).
    '''
    # region is required for all boto queries
    region = get_location(None)

    # init botocore with credentials from the configured cloud provider
    vm_ = get_configured_provider()
    session = botocore.session.get_session()  # pylint: disable=E0602
    session.set_credentials(
        access_key=config.get_cloud_config_value(
            'id', vm_, __opts__, search_global=False
        ),
        secret_key=config.get_cloud_config_value(
            'key', vm_, __opts__, search_global=False
        )
    )

    service = session.get_service('ec2')
    endpoint = service.get_endpoint(region)

    # get the instance-id for the supplied node name
    conn = get_conn(location=region)
    node = get_node(conn, name)

    params = {
        'instance_id': node.id,
        'attribute': 'disableApiTermination',
        'value': 'true' if enabled else 'false',
    }

    # flip the attribute on the instance
    operation = service.get_operation('modify-instance-attribute')
    http_response, response_data = operation.call(endpoint, **params)

    if http_response.status_code == 200:
        # IDIOM FIX: `cond and a or b` replaced with a conditional expression.
        msg = 'Termination protection successfully {0} on {1}'.format(
            'enabled' if enabled else 'disabled',
            name
        )
        log.info(msg)
        return msg

    # No proper HTTP response!?
    msg = 'Bad response from AWS: {0}'.format(http_response.status_code)
    log.error(msg)
    return msg
Beispiel #36
0
def _toggle_term_protect(name, enabled):
    '''
    Toggle termination protection on a node.

    name -- cloud node name to look up
    enabled -- True to enable protection, False to disable it

    Returns a human-readable status message (also logged).
    '''
    # region is required for all boto queries
    region = get_location(None)

    # init botocore with credentials from the configured cloud provider
    vm_ = get_configured_provider()
    session = botocore.session.get_session()  # pylint: disable=E0602
    session.set_credentials(
        access_key=config.get_cloud_config_value(
            'id', vm_, __opts__, search_global=False
        ),
        secret_key=config.get_cloud_config_value(
            'key', vm_, __opts__, search_global=False
        )
    )

    service = session.get_service('ec2')
    endpoint = service.get_endpoint(region)

    # get the instance-id for the supplied node name
    conn = get_conn(location=region)
    node = get_node(conn, name)

    params = {
        'instance_id': node.id,
        'attribute': 'disableApiTermination',
        'value': 'true' if enabled else 'false',
    }

    # flip the attribute on the instance
    operation = service.get_operation('modify-instance-attribute')
    http_response, response_data = operation.call(endpoint, **params)

    if http_response.status_code == 200:
        # IDIOM FIX: `cond and a or b` replaced with a conditional expression.
        msg = 'Termination protection successfully {0} on {1}'.format(
            'enabled' if enabled else 'disabled',
            name
        )
        log.info(msg)
        return msg

    # No proper HTTP response!?
    msg = 'Bad response from AWS: {0}'.format(http_response.status_code)
    log.error(msg)
    return msg
Beispiel #37
0
def destroy_subnet(aws_creds, region, subnet_id=""):
    """Delete the EC2 subnet *subnet_id* in *region*; raise ApiException on non-200."""
    print('Destroying subnet %s' % str(subnet_id))

    sess = botocore.session.get_session()
    sess.set_credentials(aws_creds.access_key, aws_creds.secret_key)

    svc = sess.get_service('ec2')
    ep = svc.get_endpoint(region)
    op = svc.get_operation('DeleteSubnet')
    resp, data = op.call(ep, SubnetId=subnet_id)

    p3.pprint(str(resp.status_code) + " - " + resp.reason)
    p3.pprint(data)

    if resp.status_code != 200:
        raise(ApiException)

    return True
Beispiel #38
0
def destroy_elb(aws_creds, region, lb_name=""):
    """Delete the load balancer *lb_name* in *region*; raise ApiException on non-200."""
    print('Destroying elb')

    sess = botocore.session.get_session()
    sess.set_credentials(aws_creds.access_key, aws_creds.secret_key)

    elb_svc = sess.get_service('elb')
    ep = elb_svc.get_endpoint(region)
    op = elb_svc.get_operation('DeleteLoadBalancer')
    resp, data = op.call(ep, LoadBalancerName=lb_name)

    p3.pprint(str(resp.status_code) + " - " + resp.reason)
    p3.pprint(data)

    if resp.status_code != 200:
        raise(ApiException)

    return True
def main():
    """Send 99 messages to an SQS queue in batches of 10 using SendMessageBatch.

    FIX: converted Python 2 print statements to print() calls — the originals
    are syntax errors under Python 3 and inconsistent with the rest of the file.
    """
    start = time.time()

    print()
    print("Show how to use the Batch function with SQS queues")

    # change this to your queue_url
    queue_url = 'https://some queue url'
    region = 'us-east-1'  # change this to your region

    print('queue_url: ' + queue_url)
    print('   region: ' + region)

    session = botocore.session.get_session()
    sqs = session.get_service('sqs')
    endpoint = sqs.get_endpoint(region)
    send = sqs.get_operation('SendMessageBatch')

    print("Send  99 messages, 10 at a time")
    # message bodies can be json, xml, plain text and a lot more
    message_body_prefix = "1234567890_"  # any ascii text plus some other characters

    entries = []
    for counter in range(99):
        entries.append({'Id': "testmsg_" + str(counter),
                        'MessageBody': message_body_prefix + str(counter)})
        # batch size of up to 10 messages allowed
        if (counter + 1) % 10 == 0:
            http_response, response_data = send.call(
                endpoint, queue_url=queue_url, entries=entries)
            # in production a 200 status can still contain per-message
            # failures; see the AWS docs on detecting individual failures
            if http_response.status_code != 200:
                print("error sending batch messages: " +
                      str(http_response.status_code) + " " + str(response_data))
            entries = []

    # process any remaining messages
    if entries:
        http_response, response_data = send.call(
            endpoint, queue_url=queue_url, entries=entries)
        if http_response.status_code != 200:
            print("error sending batch messages: " +
                  str(http_response.status_code) + " " + str(response_data))

    print("Total elapsed time (sec): " + str(time.time() - start))
Beispiel #40
0
def find_instance(instance_id, config_profiles):
    """
    Search through all AWS profiles and regions for an instance.
    """
    base_session = botocore.session.get_session()

    for profile in base_session.available_profiles:
        # Re-using the same session doesn't work
        profile_session = botocore.session.get_session()
        profile_session.profile = profile

        # Prefer regions listed in the profile
        regions = (config_profiles[profile].regions
                   if profile in config_profiles else None)
        if not regions:
            default_region = profile_session.get_config_variable('region')
            if not default_region:
                continue
            regions = [default_region]

        ec2 = profile_session.get_service('ec2')
        describe = ec2.get_operation('DescribeInstances')
        for region in regions:
            logger.debug("connecting to region '%s' with AWS profile '%s'...",
                         region, profile)
            endpoint = ec2.get_endpoint(region)
            try:
                resp, data = describe.call(
                    endpoint,
                    instance_ids=[instance_id],
                )
            except botocore.exceptions.NoCredentialsError:
                break

            if resp.status_code != 200:
                continue
            for reservation in data['Reservations']:
                for instance_data in reservation['Instances']:
                    return Instance.from_instance_data(instance_data,
                                                       profile, region)

    return None
def _ec2_error_description(e):
    # Pull the first error description out of an EC2ResponseError, if any.
    description = "Unknown description"
    if len(e.errors):
        first_error = e.errors[0]
        if len(first_error):
            description = first_error[0]
    return description


def do_action(region, awsKey, awsSecret, ownerID, actionObject, actionName, objectUUID, parameters):
    """
    Perform a start/stop/delete action against an EC2-compatible API.

    region -- region used to pick the endpoint
    awsKey, awsSecret -- API credentials
    ownerID, parameters -- currently unused; kept for interface compatibility
    actionObject -- 'instance' or 'volume'
    actionName -- 'start'/'stop' for instances, 'delete' for volumes
    objectUUID -- id of the instance/volume to act on

    Returns (status_code, description); (200, "") on success, and
    None implicitly for unrecognized actionObject/actionName combinations.
    """
    session = botocore.session.get_session()
    # BUG FIX: credentials were read from undefined names
    # (outscaleKey/outscaleSecret); use the parameters actually passed in.
    session.set_credentials(awsKey, awsSecret)
    ec2 = session.get_service('ec2')
    # BUG FIX: 'endpoint' was never defined, so every operation.call()
    # below raised NameError; build it from the supplied region.
    endpoint = ec2.get_endpoint(region)

    if actionObject == 'instance':
        if actionName == 'start':
            try:
                operation = ec2.get_operation('StartInstances')
                http_response, response_data = operation.call(endpoint, instance_ids=[objectUUID])
                return 200, ""
            except boto.exception.EC2ResponseError as e:
                return e.status, _ec2_error_description(e)
        elif actionName == 'stop':
            try:
                operation = ec2.get_operation('StopInstances')
                http_response, response_data = operation.call(endpoint, instance_ids=[objectUUID])
                return 200, ""
            except boto.exception.EC2ResponseError as e:
                return e.status, _ec2_error_description(e)

    elif actionObject == 'volume':
        if actionName == 'delete':
            try:
                operation = ec2.get_operation('DeleteVolume')
                http_response, response_data = operation.call(endpoint, volume_ids=[objectUUID])
                return 200, ""
            except boto.exception.EC2ResponseError as e:
                return e.status, _ec2_error_description(e)
Beispiel #42
0
def test_xml_parsing():
    """Yield a parsing check for every sample XML response/error fixture."""
    for subdir in ['responses', 'errors']:
        fixture_dir = os.path.join(os.path.dirname(__file__), 'xml')
        fixture_dir = os.path.join(fixture_dir, subdir)
        session = create_session()
        xml_files = glob.glob('%s/*.xml' % fixture_dir)
        # Derive the set of service names from the fixture filenames.
        service_names = {os.path.split(path)[1].split('-')[0]
                         for path in xml_files}
        for service_name in service_names:
            service = session.get_service(service_name)
            per_service_files = glob.glob('%s/%s-*.xml' %
                                          (fixture_dir, service_name))
            for xmlfile in per_service_files:
                expected = _get_expected_parsed_result(xmlfile)
                operation_obj = _get_operation_obj(service, xmlfile)
                raw_response_body = _get_raw_response_body(xmlfile)
                yield _test_parsed_response, xmlfile, raw_response_body, operation_obj, expected
Beispiel #43
0
def _make_call_with_errors(session, service_name, region_name, operation_name,
                           kwargs):
    """Call *operation_name* while forcing one simulated ConnectionError.

    The first HTTP send raises ConnectionError; the retry logic is expected
    to re-issue the request transparently and succeed.
    """
    service = session.get_service(service_name)
    endpoint = service.get_endpoint(region_name)
    operation = service.get_operation(operation_name)
    real_send = adapters.HTTPAdapter.send

    def flaky_send(self, *args, **kwargs):
        # Fail exactly once per adapter instance, then delegate to the
        # real implementation.
        if getattr(self, '_integ_test_error_raised', False):
            return real_send(self, *args, **kwargs)
        self._integ_test_error_raised = True
        raise ConnectionError("Simulated ConnectionError raised.")

    with mock.patch('botocore.vendored.requests.adapters.HTTPAdapter.send',
                    flaky_send):
        response = operation.call(endpoint, **kwargs)[1]
        assert_true(
            'Error' not in response, "Request was not retried properly, "
            "received error:\n%s" % pformat(response))
Beispiel #44
0
def launch_cmd(role_arn, profile=None):
    """Assume *role_arn* via STS and open the AWS console sign-in URL in a browser."""
    session = botocore.session.get_session()
    session.profile = profile
    sts = session.get_service('sts')
    endpoint = sts.get_endpoint()
    assume_role = sts.get_operation('AssumeRole')
    creds = assume_role.call(endpoint, role_arn=role_arn,
                             role_session_name='foobar')[1]
    temp = creds['Credentials']
    session_json = json.dumps({'sessionId': temp['AccessKeyId'],
                               'sessionKey': temp['SecretAccessKey'],
                               'sessionToken': temp['SessionToken']})
    # Exchange the temporary credentials for a federation sign-in token.
    r = botocore.vendored.requests.get(
        sign_in_url,
        params={'Action': 'getSigninToken', 'Session': session_json})
    login_params = json.loads(r.text)
    login_params['Action'] = 'login'
    login_params['Issuer'] = issuer_url
    login_params['Destination'] = console_url
    webbrowser.open(sign_in_url + '?' + urlencode(login_params))
Beispiel #45
0
def _uhg_test_json_parsing():
    """Yield a parsing check for every sample JSON input fixture.

    Reads each service's fixture files from the json/inputs directory and
    yields (_test_parsed_response, file, raw_body, operation, expected)
    tuples for a generator-style test runner.
    """
    # CLEANUP: the original also computed an 'output_path' that was never
    # used; it has been removed.
    input_path = os.path.join(os.path.dirname(__file__), 'json')
    input_path = os.path.join(input_path, 'inputs')
    session = botocore.session.get_session()
    jsonfiles = glob.glob('%s/*.json' % input_path)
    service_names = set()
    for fn in jsonfiles:
        # Fixture names look like '<service>-<operation>.json'.
        service_names.add(os.path.split(fn)[1].split('-')[0])
    for service_name in service_names:
        service = session.get_service(service_name)
        service_json_files = glob.glob('%s/%s-*.json' %
                                       (input_path, service_name))
        for jsonfile in service_json_files:
            expected = _get_expected_parsed_result(jsonfile)
            operation_obj = _get_operation_obj(service, jsonfile)
            with open(jsonfile, 'rb') as f:
                raw_response_body = f.read()
            yield _test_parsed_response, jsonfile, raw_response_body, operation_obj, expected
Beispiel #46
0
def test_json_parsing():
    """Parse each sample JSON input and yield a check against its saved output.

    A fixture may carry response headers under the special '__headers' key;
    those are stripped from the body and merged into the parsed response
    before comparison.
    """
    input_path = os.path.join(os.path.dirname(__file__), 'json')
    input_path = os.path.join(input_path, 'inputs')
    output_path = os.path.join(os.path.dirname(__file__), 'json')
    output_path = os.path.join(output_path, 'outputs')
    session = botocore.session.get_session()
    jsonfiles = glob.glob('%s/*.json' % input_path)
    service_names = set()
    for fn in jsonfiles:
        # Fixture names look like '<service>-<operation>.json'.
        service_names.add(os.path.split(fn)[1].split('-')[0])
    for service_name in service_names:
        service = session.get_service(service_name)
        service_input_files = glob.glob('%s/%s-*.json' %
                                        (input_path, service_name))
        for inputfile in service_input_files:
            dirname, filename = os.path.split(inputfile)
            outputfile = os.path.join(output_path, filename)
            basename = os.path.splitext(filename)[0]
            sn, opname = basename.split('-', 1)
            operation = service.get_operation(opname)
            r = JSONResponse(session, operation)
            headers = {}
            with open(inputfile, 'r') as fp:
                jsondoc = fp.read()
                # Try to get any headers using a special key
                try:
                    parsed = json.loads(jsondoc)
                except ValueError:
                    # This will error later, let it go on
                    parsed = {}
                if '__headers' in parsed:
                    headers = parsed['__headers']
                    del parsed['__headers']
                    jsondoc = json.dumps(parsed)
            r.parse(jsondoc.encode('utf-8'), 'utf-8')
            r.merge_header_values(headers)
            save_jsonfile(outputfile, r)
            # BUG FIX: the output file handle was opened without a context
            # manager and leaked if json.load raised; use `with` instead.
            with open(outputfile) as fp:
                data = json.load(fp)
            yield check_dicts, inputfile, r.get_value(), data
    def _get_instance_ip(self, instance_id):
        """Return the public IP of *instance_id*, or None on any failure."""
        session = botocore.session.get_session()
        ec2 = session.get_service("ec2")
        describe_op = ec2.get_operation("DescribeInstances")
        endpoint = ec2.get_endpoint(self._notification.region)
        response, data = describe_op.call(endpoint,
                                          instance_ids=[instance_id])

        # Swallow up any error responses and return nothing.
        if response.status_code != 200 or 'Reservations' not in data:
            log.error(
                u"Failed to describe instance %s with status_code %s: %s" %
                (instance_id, response.status_code, data))
            return None

        # Walk every reservation/instance pair looking for our instance.
        for reservation in data.get('Reservations', []):
            for instance in reservation.get('Instances', []):
                if instance.get('InstanceId', '') == instance_id:
                    return instance.get('PublicIpAddress')

        return None
Beispiel #48
0
def test_json_errors_parsing():
    # The outputs/ directory has sample output responses
    # For each file in outputs/ there's a corresponding file
    # in expected/ that has the expected parsed response.
    base_dir = os.path.join(os.path.dirname(__file__), 'json')
    errors_dir = os.path.join(base_dir, 'errors')
    expected_dir = os.path.join(base_dir, 'expected')
    session = botocore.session.get_session()
    for response_file in os.listdir(errors_dir):
        # Files look like: 'datapipeline-create-pipeline.json'
        service_name, operation_name = os.path.splitext(
            response_file)[0].split('-', 1)
        expected_path = os.path.join(expected_dir, response_file)
        raw_path = os.path.join(errors_dir, response_file)
        with open(expected_path) as f:
            expected = json.load(f)
        service_obj = session.get_service(service_name)
        operation_obj = service_obj.get_operation(operation_name)
        with open(raw_path, 'rb') as f:
            raw_response_body = f.read()
        yield _test_parsed_response, raw_path, raw_response_body, operation_obj, expected
    def get_processor_id(self):
        """Return the 'ProcessorId' tag value of our auto-scaling group, or None."""
        session = botocore.session.get_session()
        autoscaling = session.get_service("autoscaling")
        describe_tags = autoscaling.get_operation("DescribeTags")
        endpoint = autoscaling.get_endpoint(self._notification.region)
        response, data = describe_tags.call(
            endpoint,
            filters=[{
                "Name": "auto-scaling-group",
                "Values": [self._notification.auto_scaling_group]
            }])

        # Swallow up any error responses and return nothing.
        if response.status_code != 200 or 'Tags' not in data:
            log.error(
                u"Failed to retrieve tags for ASG %s with status_code %s: %s" %
                (self._notification.auto_scaling_group, response.status_code,
                 data))
            return None

        # Scan the returned tags for the one we care about.
        for tag in data.get('Tags', []):
            if tag.get('Key', '') == 'ProcessorId':
                return tag.get('Value')

        return None
Beispiel #50
0
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#  http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#

import botocore.session

# Module-level botocore session/service/endpoint shared by _make_api_call below.
# NOTE(review): the endpoint is hard-coded to us-east-1 — confirm intended.
session = botocore.session.get_session()
service = session.get_service('opsworks')
endpoint = service.get_endpoint('us-east-1')


def _make_api_call(api_operation, **kwargs):
    """
    Make an API call using botocore for the given api operation.

    :param api_operation: Operation name to perform
    :param kwargs: Any additional arguments to be passed to the service call
    :return: The response data when the HTTP response is OK; otherwise None.

    DOC FIX: the original docstring claimed the function "will exit(1)"
    on a bad response, but the code simply falls through and returns None.
    """
    operation = service.get_operation(api_operation)
    response, response_data = operation.call(endpoint, **kwargs)
    if response.ok:
        return response_data
Beispiel #51
0
def collect(model, hours):
    """Collect EC2 spot-price history and persist it via *model*.

    Resumes from the stored ``end_time`` progress row when present, otherwise
    starts *hours* hours in the past.  Pages through DescribeSpotPriceHistory
    for every non-excluded region, batch-writes the prices, then writes the
    region / product / instance-type / zone dimension tables and finally the
    new ``end_time`` marker.
    """
    # Resume point: the previous run's saved end_time, if any.
    row = model.progress.get_item(name='end_time')
    if row['timestamp'] is None:
        logging.debug('using initial window of -%d hours', hours)
        start_time = arrow.utcnow().replace(hours=-hours)
    else:
        start_time = arrow.get(row['timestamp'])
    logging.debug('start time: %s', start_time)

    end_time = arrow.utcnow()
    logging.debug('end time: %s', end_time)

    # Dimension values accumulated across all regions.
    all_regions = set()
    all_product_descriptions = set()
    all_instance_types = set()
    all_instance_zones = set()

    session = botocore.session.get_session()
    ec2 = session.get_service('ec2')
    operation = ec2.get_operation('DescribeSpotPriceHistory')

    for region in ec2.region_names:
        if any(region.startswith(x) for x in _EXCLUDED_REGION_PREFIXES):
            continue
        all_regions.add(region)

        # Page through this region's history using NextToken.
        next_token = None
        while True:
            logging.debug('collecting spot prices from region: %s', region)
            endpoint = ec2.get_endpoint(region)
            if next_token:
                response, data = operation.call(
                    endpoint,
                    start_time=start_time.format(_FMT),
                    end_time=end_time.format(_FMT),
                    next_token=next_token,
                )
            else:
                # NOTE(review): the first (un-tokenized) request omits
                # end_time while the paginated follow-ups include it —
                # confirm this asymmetry is intentional.
                response, data = operation.call(
                    endpoint,
                    start_time=start_time.format(_FMT),
                )
            next_token = data.get('NextToken')
            logging.debug('next_token: %s', next_token)
            spot_data = data.get('SpotPriceHistory', [])

            #conn = boto.ec2.connect_to_region(r.name)
            #logging.debug('getting spot prices for region: %s', r.name)
            #data = conn.get_spot_price_history(start_time=start_time)

            logging.debug('saving %d spot prices for region: %s',
                          len(spot_data), region)
            with model.spot_prices.batch_write() as batch:
                for d in spot_data:
                    all_product_descriptions.add(d['ProductDescription'])
                    all_instance_types.add(d['InstanceType'])
                    all_instance_zones.add((
                        d['ProductDescription'],
                        d['InstanceType'],
                        d['AvailabilityZone'],
                    ))
                    # Composite 'product:type:zone' id plus timestamp range.
                    batch.put_item(
                        data={
                            'instance_zone_id':
                            ':'.join([
                                d['ProductDescription'],
                                d['InstanceType'],
                                d['AvailabilityZone'],
                            ]),
                            'timestamp':
                            arrow.get(d['Timestamp']).timestamp,
                            'price':
                            decimal.Decimal(str(d['SpotPrice'])),
                        })
            if not next_token:
                break

    logging.debug('saving %d regions', len(all_regions))
    with model.regions.batch_write() as batch:
        for i in all_regions:
            batch.put_item(data={'region': i})

    logging.debug('saving %d product_descriptions',
                  len(all_product_descriptions))
    with model.product_descriptions.batch_write() as batch:
        for i in all_product_descriptions:
            batch.put_item(data={'product_description': i})

    logging.debug('saving %d instance_types', len(all_instance_types))
    with model.instance_types.batch_write() as batch:
        for i in all_instance_types:
            batch.put_item(data={'instance_type': i})

    logging.debug('saving %d instance_zones', len(all_instance_zones))
    with model.instance_zones.batch_write() as batch:
        for i in all_instance_zones:
            batch.put_item(data={
                'instance_id': ':'.join([i[0], i[1]]),
                'zone': i[2],
            })

    # Record the new high-water mark so the next run resumes from here.
    logging.debug('saving end_time')
    with model.progress.batch_write() as batch:
        batch.put_item(data={
            'name': 'end_time',
            'timestamp': end_time.timestamp,
        })
Beispiel #52
0
def saml_login(profile,
               region,
               url,
               user,
               password=None,
               role=None,
               print_env_vars=False,
               overwrite_default_credentials=False):
    """Log in to AWS via a Shibboleth SAML IdP and store temporary credentials.

    profile -- name of the AWS credentials profile to write
    region -- region used for the STS endpoint
    url -- IdP login URL
    user / password -- IdP credentials (password falls back to the keyring,
        then to an interactive prompt)
    role -- optional substring selecting among multiple SAML roles
    print_env_vars -- also print shell export statements for the credentials
    overwrite_default_credentials -- additionally write the 'default' profile
    """
    session = requests.Session()
    response = session.get(url)

    keyring_key = 'aws-minion.saml'
    password = password or keyring.get_password(keyring_key, user)
    if not password:
        password = click.prompt('Password', hide_input=True)

    with Action('Authenticating against {url}..', **vars()) as act:
        # NOTE: parameters are hardcoded for Shibboleth IDP
        data = {'j_username': user, 'j_password': password, 'submit': 'Login'}
        response2 = session.post(response.url, data=data)
        saml_xml = get_saml_response(response2.text)
        if not saml_xml:
            act.error('LOGIN FAILED')
            click.secho(
                'SAML login with user "{}" failed, please check your username and password.\n'
                .format(user) +
                'You might need to change the password in your keyring (e.g. Mac OS X keychain) '
                + 'or use the "--password" option.',
                bold=True,
                fg='blue')
            return

        url = get_form_action(response2.text)
        encoded_xml = codecs.encode(saml_xml.encode('utf-8'), 'base64')
        response3 = session.post(url, data={'SAMLResponse': encoded_xml})
        account_names = get_account_names(response3.text)

    # Login succeeded: remember the password for next time.
    keyring.set_password(keyring_key, user, password)

    with Action('Checking SAML roles..') as act:
        roles = get_roles(saml_xml)
        if not roles:
            act.error('NO VALID ROLE FOUND')
            return

    if len(roles) == 1:
        provider_arn, role_arn = roles[0]
    elif role:
        matching_roles = [
            _role for _role in roles
            if role in get_role_label(_role, account_names)
        ]
        if not matching_roles or len(matching_roles) > 1:
            raise click.UsageError(
                'Given role (--role) was not found or not unique')
        provider_arn, role_arn = matching_roles[0]
    else:
        roles.sort()
        provider_arn, role_arn = choice(
            'Multiple roles found, please select one.',
            [(r, get_role_label(r, account_names)) for r in roles])

    with Action('Assuming role "{role_label}"..',
                role_label=get_role_label((provider_arn, role_arn),
                                          account_names)):
        saml_assertion = codecs.encode(saml_xml.encode('utf-8'),
                                       'base64').decode('ascii').replace(
                                           '\n', '')

        # botocore NEEDS some credentials, but does not care about their actual values
        os.environ['AWS_ACCESS_KEY_ID'] = 'fake123'
        os.environ['AWS_SECRET_ACCESS_KEY'] = 'fake123'

        try:
            session = botocore.session.get_session()
            sts = session.get_service('sts')
            operation = sts.get_operation('AssumeRoleWithSAML')

            endpoint = sts.get_endpoint(region)
            endpoint._signature_version = None
            http_response, response_data = operation.call(
                endpoint,
                role_arn=role_arn,
                principal_arn=provider_arn,
                SAMLAssertion=saml_assertion)
        finally:
            # Always scrub the fake credentials from the environment.
            del os.environ['AWS_ACCESS_KEY_ID']
            del os.environ['AWS_SECRET_ACCESS_KEY']

        key_id = response_data['Credentials']['AccessKeyId']
        secret = response_data['Credentials']['SecretAccessKey']
        session_token = response_data['Credentials']['SessionToken']

    if print_env_vars:
        # different AWS SDKs expect either AWS_SESSION_TOKEN or AWS_SECURITY_TOKEN, so set both
        # BUG FIX: the AWS_SESSION_TOKEN line previously printed a stray ')'
        # after the closing quote, producing a broken export statement.
        click.secho(dedent('''\
        # environment variables with temporary AWS credentials:
        export AWS_ACCESS_KEY_ID="{key_id}"
        export AWS_SECRET_ACCESS_KEY="{secret}"
        export AWS_SESSION_TOKEN="{session_token}"
        export AWS_SECURITY_TOKEN="{session_token}"''').format(**vars()),
                    fg='blue')

    profiles_to_write = set([profile])
    if overwrite_default_credentials:
        profiles_to_write.add('default')

    with Action('Writing temporary AWS credentials..'):
        for prof in profiles_to_write:
            write_aws_credentials(prof, key_id, secret, session_token)
Beispiel #53
0
def main(st, et):
    """Collect EC2 spot-price history for all regions and ship it to OpenTSDB.

    For every region/zone/product/instance-type combination known from the
    on-demand price table, fetch the spot price history between a start and
    end time via the (legacy) botocore DescribeSpotPriceHistory operation,
    emit each raw data point with common.otsdb_send, and finally forward-fill
    each series to 1-minute resolution (via pandas) so the stored time series
    has no gaps between actual price-change events.

    Arguments:
        st -- start time (arrow-like) or falsy; when falsy, defaults to "now"
              shifted by common.DEFAULT_LOOKBACK_MINUTES (presumably a
              negative minute offset so this looks *back* -- TODO confirm).
        et -- end time (arrow-like) or falsy; defaults to arrow.utcnow().
    """
    if st:
        start_time = st
    else:
        # NOTE(review): relies on old arrow behavior where replace() with a
        # plural keyword ("minutes=") performs a relative shift; newer arrow
        # versions require .shift() for this -- confirm pinned arrow version.
        start_time = arrow.utcnow().replace(minutes=common.DEFAULT_LOOKBACK_MINUTES)

    if et:
        end_time = et
    else:
        end_time = arrow.utcnow()

    # Collected but (in this view) never read back -- possibly used by code
    # beyond this chunk, or vestigial.
    all_regions = set()
    all_product_descriptions = set()
    all_instance_types = set()
    all_instance_zones = set()

    # Legacy botocore Service/Operation API (removed in modern botocore).
    session = botocore.session.get_session()
    ec2 = session.get_service('ec2')
    operation = ec2.get_operation('DescribeSpotPriceHistory')
    local_timeseries = {}

    # vals[region][zone][product][inst_type] -> list of spot prices (strings)
    # tss [region][zone][product][inst_type] -> list of matching timestamps
    # Keys are normalized: '-' and '.' replaced with '_' for OpenTSDB tags.
    vals = {}
    tss = {}
    print 'Preparing...'
    # Pre-populate the nested dicts with empty lists for every known
    # region/zone/product/instance-type so appends below never KeyError
    # for combinations that have on-demand pricing info.
    for region in AWS_ON_DEMAND_PRICES:
        reg_key = region.replace('-','_')
        if region not in vals:
            vals[reg_key] = {}
            tss[reg_key] = {}
        for zone in AWS_REGIONS_TO_ZONES[region]:
            # print 'Zone: %s' % zone
            if zone not in vals[reg_key]:
                vals[reg_key][zone] = {}
                tss[reg_key][zone] = {}
            for product in AWS_ON_DEMAND_PRICES[region]:
                # print 'Product: %s' % product
                if not AWS_ON_DEMAND_PRICES[region][product]:
                    print "WARNING: Empty %s:%s" % (region, product)
                    continue
                if product not in vals[reg_key][zone]:
                    vals[reg_key][zone][product] = {}
                    tss[reg_key][zone][product] = {}
                for inst_type in common.AWS_ON_DEMAND_PRICES[region][product]:
                    # print "%s/%s/%s/%s" % (reg_key, zone, product, inst_type)
                    vals[reg_key][zone][product][inst_type] = []
                    tss[reg_key][zone][product][inst_type] = []
    #sys.exit(1)
    for region in ec2.region_names:
        all_regions.add(region)
        cnt = 0
        next_token = None
        print 'Collecting spot prices from region: %s for %s to %s' % (region, start_time.format(_FMT), end_time.format(_FMT))
        sys.stdout.flush()
        # if region != 'us-east-1':
        #continue
        # Paginate through the spot-price history using NextToken.
        while True:
            endpoint = ec2.get_endpoint(region)
            if next_token:
                response, data = operation.call(
                    endpoint,
                    start_time=start_time.format(_FMT),
                    end_time=end_time.format(_FMT),
                    next_token=next_token,
                )
            else:
                # NOTE(review): the first (un-tokenized) request passes only
                # start_time while paginated follow-ups also pass end_time --
                # looks inconsistent; confirm whether omitting end_time on the
                # first page is intentional.
                response, data = operation.call(
                    endpoint,
                    start_time=start_time.format(_FMT),
                )
            next_token = data.get('NextToken')
            spot_data = data.get('SpotPriceHistory', [])
            first_entry_in_batch = True
            sys.stdout.flush()
            for d in spot_data:
                
                ts = common.ts_from_aws(d)
                
                if first_entry_in_batch:
                    print "Fetched %s records starting with %s" % (len(spot_data), d['Timestamp'])
                    first_entry_in_batch = False
                
                # {u'Timestamp': '2014-04-10T23:49:21.000Z', u'ProductDescription': 'Linux/UNIX (Amazon VPC)', u'InstanceType': 'hi1.4xlarge', u'SpotPrice': '0.128300', u'AvailabilityZone': 'us-east-1b'}
                reg_key = region.replace('-','_')
                d['Region'] = reg_key
                
                
                d['InstanceTypeNorm'] = d['InstanceType'].replace('.','_')

                value = d['SpotPrice']

                # Normalize AWS identifiers into OpenTSDB-safe tag values
                # (no '-', '.', '(', ')', ' ' or '/').
                zone = d['AvailabilityZone'].replace('-','_')
                product = d['ProductDescription'].replace('-','_').replace('(','').replace(')','_').replace(' ','_').replace('/','_')
                if product.endswith('_'):
                    product=product[:-1]
                inst_type = d['InstanceTypeNorm'].replace('-','_')

                tags = { 
                    'cloud' : 'aws',
                    'region' : reg_key,
                    'zone'  : zone,
                    'product' : product,
                    'inst_type' : inst_type,
                    'units' : 'USD'
                    }
                try:
                    # Accumulate for the fill pass below; KeyError means this
                    # combination had no on-demand entry, so it was never
                    # pre-populated above.
                    vals[reg_key][zone][product][inst_type].append(value)
                    tss[reg_key][zone][product][inst_type].append(ts)
                except KeyError:
                    print "No on-demand info for %s/%s/%s/%s" % (reg_key,zone,product,inst_type)

                # Emit the raw point under both the dedicated 'price_spot'
                # metric and the generic 'price' metric tagged price_type=spot.
                common.otsdb_send('price_spot', value, tags, ts, False)  
                tags['price_type'] = 'spot'
                common.otsdb_send('price', value, tags, ts, False)
                cnt += 1

            if not next_token:
                break
        print "Found %s price points" % cnt
        # NOTE(review): reg_key here is whatever the inner data loop last set;
        # if this region returned zero data points it still holds the key of a
        # *previous* region (or of the prep loop) -- likely a latent bug.
        # Fill pass: forward-fill each collected series to 1-minute resolution
        # and re-emit, so consumers see a continuous price curve.
        for zone in tss[reg_key]:
            for product in tss[reg_key][zone]:
                for inst_type in tss[reg_key][zone][product]:
                    if not tss[reg_key][zone][product][inst_type]:
                        print "No spot info for %s/%s/%s/%s." % (reg_key, zone, product, inst_type)
                        continue
                    print "%s/%s/%s/%s" % (reg_key, zone, product, inst_type)
                    tags = { 
                        'cloud' : 'aws',
                        'region' : reg_key,
                        'zone'  : zone,
                        'product' : product,
                        'inst_type' : inst_type,
                        'units' : 'USD'
                        }

                    # Build a pandas Series indexed by timestamp, then
                    # forward-fill to a fixed 1-minute frequency.
                    tss_ts = tss[reg_key][zone][product][inst_type]
                    tss_ts.sort()
                    tss_dt = to_datetime(tss_ts, unit='s')
                    s_data = vals[reg_key][zone][product][inst_type]
                    s1 = Series(s_data, tss_dt)
                    # print "Creating Series(%s, %s) from %s; length: %s" % (s_data, tss_dt, tss_ts, len(s1))

                    if len(s1) > 1:
                        # We already took care of 1-length (no fill)
                        s2 = s1.asfreq('1Min', method='ffill')
                        # print "Sparse series:\n%s\n" % s1
                        # print "Filled series:\n%s\n" % s2
                        # print "Sparse: %s, filled: %s" % (len(s1), len(s2))
                        for (dt,value) in s2.iteritems():
                            ts = arrow.Arrow.fromdatetime(dt).timestamp
                            common.otsdb_send('price_spot', value, tags, ts, False)  
                            # NOTE(review): tags is mutated here, so every
                            # 'price_spot' send after the first in this loop
                            # also carries price_type=spot -- confirm intended.
                            tags['price_type'] = 'spot'

                            common.otsdb_send('price', value, tags, ts, False)
        sys.stdout.flush()