def greengrass_hello_world_run():
    # Create the Greengrass client so that we can send messages to the IoT console
    client = greengrasssdk.client('iot-data')
    iot_topic = '$aws/things/{}/infer'.format(os.environ['AWS_IOT_THING_NAME'])

    # Stream configuration, name and retention
    # Note that the name will appear as deeplens-myStream
    stream_name = 'myStream'
    retention = 2  #hours

    # Amount of time to stream
    wait_time = 60 * 60 * 5  #seconds

    # Use the boto session API to grab credentials
    session = Session()
    creds = session.get_credentials()

    # Create producer and stream.
    producer = dkv.createProducer(creds.access_key, creds.secret_key,
                                  creds.token, "us-east-1")
    client.publish(topic=iot_topic, payload="Producer created")
    kvs_stream = producer.createStream(stream_name, retention)
    client.publish(topic=iot_topic,
                   payload="Stream {} created".format(stream_name))

    # Start putting data into the KVS stream
    kvs_stream.start()
    client.publish(topic=iot_topic, payload="Stream started")
    time.sleep(wait_time)
    # Stop putting data into the KVS stream
    kvs_stream.stop()
    client.publish(topic=iot_topic, payload="Stream stopped")
Example #2
    def test_credential_process_returns_error(self):
        config = (
            '[profile processcreds]\n'
            'credential_process = %s --raise-error\n'
        )
        config = config % self.credential_process
        with temporary_file('w') as f:
            f.write(config)
            f.flush()
            self.environ['AWS_CONFIG_FILE'] = f.name

            session = Session(profile='processcreds')

            # This regex validates that there is no substring: b'
            # The reason why we want to validate that is that we want to
            # make sure that stderr is actually decoded so that in
            # exceptional cases the error is properly formatted.
            # As for how the regex works:
            # `(?!b').` is a negative lookahead, meaning that it will only
            # match if it is not followed by the pattern `b'`. Since it is
            # followed by a `.` it will match any character not followed by
            # that pattern. `((?!b').)*` does that zero or more times. The
            # final pattern adds `^` and `$` to anchor the beginning and end
            # of the string so we can know the whole string is consumed.
            # Finally `(?s)` at the beginning makes dots match newlines so
            # we can handle a multi-line string.
            reg = r"(?s)^((?!b').)*$"
            with self.assertRaisesRegexp(CredentialRetrievalError, reg):
                session.get_credentials()
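To make the lookahead trick in the comment above concrete, here is a small standalone sketch (not part of the test) showing that the pattern only matches text that never contains the substring b':

import re

reg = r"(?s)^((?!b').)*$"
# Decoded stderr: matches, even across newlines thanks to (?s).
assert re.match(reg, "RuntimeError: process failed\nexit code 1")
# An undecoded bytes repr leaks a b' substring: no match.
assert re.match(reg, "RuntimeError: b'process failed'") is None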
Example #3
def call_lambda(function, data):
    s = Session()
    clientLambda = s.create_client("lambda",
                                   config=Config(retries={'max_attempts': 0}))
    clientLambda.invoke(FunctionName=function,
                        InvocationType="Event",
                        Payload=json.dumps(data))
Example #4
    def setUp(self):
        self.env_original = os.environ.copy()
        self.environ_copy = os.environ.copy()
        super(TestAssumeRoleCredentials, self).setUp()
        os.environ = self.environ_copy
        self.parent_session = Session()
        self.iam = self.parent_session.create_client('iam')
        self.sts = self.parent_session.create_client('sts')
        self.tempdir = tempfile.mkdtemp()
        self.config_file = os.path.join(self.tempdir, 'config')

        # A role trust policy that allows the current account to call assume
        # role on itself.
        account_id = self.sts.get_caller_identity()['Account']
        self.role_policy = {
            "Version":
            "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Principal": {
                    "AWS": "arn:aws:iam::%s:root" % account_id
                },
                "Action": "sts:AssumeRole"
            }]
        }
Example #5
    def test_credential_process_returns_error(self):
        config = ('[profile processcreds]\n'
                  'credential_process = %s --raise-error\n')
        config = config % self.credential_process
        with temporary_file('w') as f:
            f.write(config)
            f.flush()
            self.environ['AWS_CONFIG_FILE'] = f.name

            session = Session(profile='processcreds')

            # This regex validates that there is no substring: b'
            # The reason why we want to validate that is that we want to
            # make sure that stderr is actually decoded so that in
            # exceptional cases the error is properly formatted.
            # As for how the regex works:
            # `(?!b').` is a negative lookahead, meaning that it will only
            # match if it is not followed by the pattern `b'`. Since it is
            # followed by a `.` it will match any character not followed by
            # that pattern. `((?!b').)*` does that zero or more times. The
            # final pattern adds `^` and `$` to anchor the beginning and end
            # of the string so we can know the whole string is consumed.
            # Finally `(?s)` at the beginning makes dots match newlines so
            # we can handle a multi-line string.
            reg = r"(?s)^((?!b').)*$"
            with self.assertRaisesRegex(CredentialRetrievalError, reg):
                session.get_credentials()
Example #6
def get_credentials(profile: str = "default",
                    environment: Optional[Environment] = None) -> Environment:
    """
    Use session cache so users don't need to use MFA while there are valid
    session tokens. This is the behavior of the AWSCLI and what we are trying to
    emulate.

    Modified with support for profiles from:
    https://github.com/boto/botocore/pull/1338/#issuecomment-368472031
    """
    # By default the cache path is ~/.aws/cli/cache
    cli_cache = (Path.home() / ".aws" / "cli" / "cache").absolute()

    # Construct botocore session with cache
    session = Session(profile=profile)
    session.get_component("credential_provider").get_provider(
        "assume-role").cache = credentials.JSONFileCache(cli_cache)

    # return credentials from session
    creds = boto3.Session(botocore_session=session,
                          profile_name=profile).get_credentials()

    return {
        "AWS_ACCESS_KEY_ID": creds.access_key,
        "AWS_SECRET_ACCESS_KEY": creds.secret_key,
        "AWS_SESSION_TOKEN": creds.token,
    }
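One possible way to consume the helper above (a sketch; exporting into os.environ and the "default" profile are illustrative assumptions):

import os

env = get_credentials(profile="default")
# Skip AWS_SESSION_TOKEN when the profile has no session token, since
# os.environ values must be strings.
os.environ.update({key: value for key, value in env.items() if value})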
Example #7
    def setUp(self):
        self.env_original = os.environ.copy()
        self.environ_copy = os.environ.copy()
        super(TestAssumeRoleCredentials, self).setUp()
        os.environ = self.environ_copy
        # The tests rely on manipulating AWS_CONFIG_FILE,
        # but we also need to make sure we don't accidentally
        # pick up the ~/.aws/credentials file either.
        os.environ['AWS_SHARED_CREDENTIALS_FILE'] = str(uuid4())
        self.parent_session = Session()
        self.iam = self.parent_session.create_client('iam')
        self.sts = self.parent_session.create_client('sts')
        self.tempdir = tempfile.mkdtemp()
        self.config_file = os.path.join(self.tempdir, 'config')

        # A role trust policy that allows the current account to call assume
        # role on itself.
        account_id = self.sts.get_caller_identity()['Account']
        self.role_policy = {
            "Version":
            "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Principal": {
                    "AWS": "arn:aws:iam::%s:root" % account_id
                },
                "Action": "sts:AssumeRole"
            }]
        }
Example #8
    def load(cls, profile: str) -> Optional["AwsConfig"]:
        session = Session(profile=profile)
        try:
            c = session.get_scoped_config()
        except ProfileNotFound:
            raise MissingAwsConfigException(session.profile)

        def get(key: str, func: Optional[Callable] = None):
            val = c.get(key)
            if val is None or val == "None":
                return None
            else:
                return func(val) if func else val

        try:
            return cls(
                profile=profile,
                azure_tenant_id=c["awsad-azure_tenant_id"],
                azure_app_id=c["awsad-azure_app_id"],
                azure_app_title=c["awsad-azure_app_title"],
                aws_default_role_arn=get("awsad-aws_default_role_arn"),
                aws_session_duration=get("awsad-aws_session_duration", int),
                aws_access_key_id=get("aws_access_key_id"),
                aws_secret_access_key=get("aws_secret_access_key"),
                aws_session_token=get("aws_session_token"),
                aws_expiration_time=get("awsad-aws_expiration_time",
                                        datetime.datetime.fromisoformat))
        except KeyError:
            raise MissingAwsConfigException(session.profile)
Example #9
def test_old_model_continues_to_work():
    # This test ensures that botocore can load the service models as they exist
    # today.  There's a directory in tests/functional/models that is a
    # snapshot of a service model.  This test ensures that we can continue
    # to stub an API call using this model.  That way if the models ever
    # change we have a mechanism to ensure that the existing models continue
    # to work with botocore.  The test should not change (with the exception
    # of potential changes to the ClientHTTPStubber), and the files in
    # tests/functional/models should not change!
    session = Session()
    loader = session.get_component('data_loader')
    # We're adding our path to the existing search paths so we don't have to
    # copy additional data files such as _retry.json to our TEST_MODELS_DIR.
    # We only care about the service model and endpoints file not changing.
    # This also prevents us from having to make any changes to this models dir
    # if we end up adding a new data file that's needed to create clients.
    # We're adding our TEST_MODELS_DIR as the first element in the list to
    # ensure we load the endpoints.json file from TEST_MODELS_DIR.  For the
    # service model we have an extra safety net where we can choose a custom
    # client name.
    loader.search_paths.insert(0, TEST_MODELS_DIR)

    # The model dir we copied was renamed to 'custom-acm'
    # to ensure we're loading our version of the model and not
    # the built-in one.
    client = session.create_client(
        'custom-acm',
        region_name='us-west-2',
        aws_access_key_id='foo',
        aws_secret_access_key='bar',
    )
    with ClientHTTPStubber(client) as stubber:
        stubber.add_response(url='https://acm.us-west-2.amazonaws.com/',
                             headers={
                                 'x-amzn-RequestId': 'abcd',
                                 'Date': 'Fri, 26 Oct 2018 01:46:30 GMT',
                                 'Content-Length': '29',
                                 'Content-Type': 'application/x-amz-json-1.1'
                             },
                             body=b'{"CertificateSummaryList":[]}')
        response = client.list_certificates()
        assert response == {
            'CertificateSummaryList': [],
            'ResponseMetadata': {
                'HTTPHeaders': {
                    'content-length': '29',
                    'content-type': 'application/x-amz-json-1.1',
                    'date': 'Fri, 26 Oct 2018 01:46:30 GMT',
                    'x-amzn-requestid': 'abcd'
                },
                'HTTPStatusCode': 200,
                'RequestId': 'abcd',
                'RetryAttempts': 0
            }
        }

    # Also verify we can use the paginators as well.
    assert client.can_paginate('list_certificates') is True
    assert client.waiter_names == ['certificate_validated']
Example #10
def create_botocore_session(profile=None, debug=False):
    # type: (str, bool) -> Session
    s = Session(profile=profile)
    _add_chalice_user_agent(s)
    if debug:
        s.set_debug_logger('')
        _inject_large_request_body_filter()
    return s
Example #12
def assume_profile_role(role_profile, session_name="", session_duration=0):
    """Assume role described by role_profile and return the auth response."""

    # Get local profile config
    config = Session(profile=role_profile).get_scoped_config()

    # Construct assume role request
    assert "role_arn" in config, f"{role_profile} does not have role_arn."
    role_arn = config["role_arn"]
    rq = {
        "RoleArn": role_arn,
        "RoleSessionName": session_name or get_default_session_name(),
        # "DurationSeconds": 28800 # get_max_duration(role_arn)
    }

    # If no duration was given, fall back to the cached maximum duration for this role
    if not session_duration:
        best_max = get_max_duration(role_arn)
        dt = timedelta(seconds=best_max)
        log.debug(f"Using duration of {humanize.naturaldelta(dt)} based on cache.")
        session_duration = best_max

    if session_duration:
        rq["DurationSeconds"] = session_duration
    else:
        log.debug(f"No session duration specified, letting AWS choose a default.")

    # Add MFA token if needed
    if "mfa_serial" in config:
        rq["SerialNumber"] = config["mfa_serial"]
        rq["TokenCode"] = questionary.text("Enter MFA code:").ask()

    # Log request before making it
    log.debug(f"Auth request:\n{json.dumps(rq, indent=4)}")

    # If source_profile is given, we should use it instead of the default profile
    source_profile = config.get("source_profile")
    log.info(f"Using source profile: {source_profile}")

    # Get auth token
    session = boto3.Session(profile_name=source_profile)
    sts = session.client("sts")
    response = sts.assume_role(**rq)

    # Cache session duration
    if session_duration:
        cache_max_duration(role_arn, session_duration)

    # Log auth token
    resp_str = json.dumps(response, indent=4, default=lambda o: str(o))
    log.debug(f"Auth response:\n{resp_str}")

    # Log expiration date
    local_exp = response["Credentials"]["Expiration"].astimezone()
    remaining = humanize.naturaldelta(local_exp - datetime.now(pytz.utc))
    log.info(f"The token will expire after {remaining} on {local_exp}")

    return response
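A sketch of turning the returned auth response into environment variables for a child process (the profile name is hypothetical; the Credentials keys are the standard STS AssumeRole response fields used above):

import os

response = assume_profile_role("my-role-profile")  # hypothetical profile name
creds = response["Credentials"]
os.environ["AWS_ACCESS_KEY_ID"] = creds["AccessKeyId"]
os.environ["AWS_SECRET_ACCESS_KEY"] = creds["SecretAccessKey"]
os.environ["AWS_SESSION_TOKEN"] = creds["SessionToken"]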
Example #13
def call_lambda_sync(function, data):
    s = Session()
    clientLambda = s.create_client("lambda",
                                   config=Config(retries={'max_attempts': 0}))
    response = clientLambda.invoke(FunctionName=function,
                                   InvocationType="RequestResponse",
                                   Payload=json.dumps(data))
    body = json.loads(response['Payload'].read())
    return body
Example #14
 def __init__(self):
     """Set up default client suppliers for identity silos."""
     self._china_supplier = DefaultClientSupplier(botocore_session=Session(
         profile="china"))
     self._middle_east_supplier = DefaultClientSupplier(
         botocore_session=Session(profile="middle-east"))
     self._hong_kong_supplier = DefaultClientSupplier(
         botocore_session=Session(profile="hong-kong"))
     self._default_supplier = DefaultClientSupplier()
Example #15
    def __init__(self) -> None:
        current_session = Session()
        region = current_session.get_config_variable('region')
        creds = current_session.get_credentials()
        self.signer = SigV4Auth(creds, 'execute-api', region)

        analysis_api_fqdn = os.environ.get('ANALYSIS_API_FQDN')
        analysis_api_path = os.environ.get('ANALYSIS_API_PATH')
        self.url = 'https://' + analysis_api_fqdn + '/' + analysis_api_path
Example #16
 def run(self):
     print('Running test...')
     client = Session().create_client('lambda', region_name=self.region)
     ret = client.invoke(FunctionName=self.name,
                         InvocationType='RequestResponse',
                         Payload=self.testdata.encode())
     print('HTTP Status Code: %s' %
           ret['ResponseMetadata']['HTTPStatusCode'])
     print(ret['Payload'].read())
Example #17
def create_session():
    boto_session = Session()
    boto_session.lazy_register_component('data_loader',
                                         lambda: create_loader())
    if 'REQUESTS_CA_BUNDLE' not in os.environ:
        ca_bundle_path = extract_file_from_jar(
            "botocore/vendored/requests/cacert.pem")
        os.environ['REQUESTS_CA_BUNDLE'] = ca_bundle_path
    return boto_session
Example #18
def test_old_model_continues_to_work():
    # This test ensures that botocore can load the service models as they exist
    # today.  There's a directory in tests/functional/models that is a
    # snapshot of a service model.  This test ensures that we can continue
    # to stub an API call using this model.  That way if the models ever
    # change we have a mechanism to ensure that the existing models continue
    # to work with botocore.  The test should not change (with the exception
    # of potential changes to the ClientHTTPStubber), and the files in
    # tests/functional/models should not change!
    session = Session()
    loader = session.get_component('data_loader')
    # We're adding our path to the existing search paths so we don't have to
    # copy additional data files such as _retry.json to our FIXED_MODELS_DIR.
    # We only care about the service model and endpoints file not changing.
    # This also prevents us from having to make any changes to this models dir
    # if we end up adding a new data file that's needed to create clients.
    # We're adding our FIXED_MODELS_DIR as the first element in the list to
    # ensure we load the endpoints.json file from FIXED_MODELS_DIR.  For the
    # service model we have an extra safety net where we can choose a custom
    # client name.
    loader.search_paths.insert(0, FIXED_MODELS_DIR)

    # The model dir we copied was renamed to 'custom-acm'
    # to ensure we're loading our version of the model and not
    # the built-in one.
    client = session.create_client(
        'custom-acm', region_name='us-west-2',
        aws_access_key_id='foo', aws_secret_access_key='bar',
    )
    with ClientHTTPStubber(client) as stubber:
        stubber.add_response(
            url='https://acm.us-west-2.amazonaws.com/',
            headers={'x-amzn-RequestId': 'abcd',
                     'Date': 'Fri, 26 Oct 2018 01:46:30 GMT',
                     'Content-Length': '29',
                     'Content-Type': 'application/x-amz-json-1.1'},
            body=b'{"CertificateSummaryList":[]}')
        response = client.list_certificates()
        assert_equal(
            response,
            {'CertificateSummaryList': [],
             'ResponseMetadata': {
                 'HTTPHeaders': {
                     'content-length': '29',
                     'content-type': 'application/x-amz-json-1.1',
                     'date': 'Fri, 26 Oct 2018 01:46:30 GMT',
                     'x-amzn-requestid': 'abcd'},
                 'HTTPStatusCode': 200,
                 'RequestId': 'abcd',
                 'RetryAttempts': 0}
             }
        )

    # Also verify we can use the paginators as well.
    assert_equal(client.can_paginate('list_certificates'), True)
    assert_equal(client.waiter_names, ['certificate_validated'])
Example #19
 def __init__(self, collectionId):
     self.collectionId = collectionId
     self.faceId = None
     self.allThreadStarted = False
     self._finished = False
     self.threads = []
     self.resultsQ = Queue.Queue()
     config = Config(connect_timeout=1, read_timeout=2)
     session = Session()
     self.rekClient = session.create_client('rekognition', config=config)
Example #20
def moto_server():
    # Start moto server
    proc = Popen(["moto_server", "s3", "-p", "8082"], stderr=DEVNULL, stdout=DEVNULL)
    time.sleep(1)
    with mock_s3(), proc:
        # Make sure bucket exists
        session = Session()
        client = session.create_client("s3", endpoint_url=f"http://{s3_netloc}")
        yield lambda: s3_reset(client)
        proc.kill()
Example #21
def run():
    "Entry proint"

    obj = BotocoreClientHandler(service='ecr')
    parser = obj._parse_args()
    print(parser.profile)
    session = Session(profile=parser.profile)
    print(dir(session))
    print(session.get_config_variable('region'))
    print(session.get_credentials(), session.profile)
Example #22
def write_image_to_s3(img):
    session = Session()
    s3 = session.create_client('s3')
    # File name matches file name in s3-image-processing
    file_name = 'DeepLens/face.jpg'
    # You can control the size and quality of the image
    encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
    _, jpg_data = cv2.imencode('.jpg', img, encode_param)
    response = s3.put_object(ACL='public-read-write', Body=jpg_data.tostring(),
                             Bucket='deeplens-fd-family', Key=file_name)
    image_url = 'https://s3.amazonaws.com/deeplens-fd-family/'+file_name
    return image_url
Example #23
def s3(s3_base):
    from botocore.session import Session

    os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
    os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
    # NB: we use the sync botocore client for setup
    session = Session()
    client = session.create_client("s3", endpoint_url=endpoint_uri)
    client.create_bucket(Bucket=BUCKET_NAME)

    yield
Example #24
def create_botocore_session(profile=None, debug=False,
                            connection_timeout=None):
    # type: (str, bool, int) -> Session
    s = Session(profile=profile)
    _add_chalice_user_agent(s)
    if debug:
        s.set_debug_logger('')
        _inject_large_request_body_filter()
    if connection_timeout is not None:
        config = BotocoreConfig(connect_timeout=connection_timeout)
        s.set_default_client_config(config)
    return s
Example #25
def get_creds():
    global access_key, secret_key, token
    session = Session()
    creds = session.get_credentials()

    if creds is None:
        logger.info("no credentials found: " + str(creds))
        return None

    access_key = creds.access_key
    secret_key = creds.secret_key
    token = creds.token
Example #26
def _public_apis():
    session = Session()

    # Mimic the scenario that user does not have aws credentials setup
    session.get_credentials = mock.Mock(return_value=None)

    for service_name in PUBLIC_API_TESTS:
        client = session.create_client(service_name, REGIONS[service_name])
        for operation_name in PUBLIC_API_TESTS[service_name]:
            kwargs = PUBLIC_API_TESTS[service_name][operation_name]
            method = getattr(client, xform_name(operation_name))
            yield client, method, kwargs
Example #27
    def test_honors_aws_shared_credentials_file_env_var(self):
        with temporary_file('w') as f:
            f.write('[default]\n'
                    'aws_access_key_id=custom1\n'
                    'aws_secret_access_key=custom2\n')
            f.flush()
            os.environ['AWS_SHARED_CREDENTIALS_FILE'] = f.name
            s = Session()
            credentials = s.get_credentials()

            self.assertEqual(credentials.access_key, 'custom1')
            self.assertEqual(credentials.secret_key, 'custom2')
Example #28
 def setUp(self):
     self.session = Session()
     self.client = self.session.create_client(
         'logs', region_name='us-west-2')
     self.stubber = Stubber(self.client)
     self.group_name = 'groupName'
     self.start = '1970-01-01T00:00:01.000000'
     self.expected_start_as_milli_epoch = 1000
     self.filter_pattern = 'mypattern'
     self.log_timestamp = 1000
     self.expected_log_timestamp_as_datetime = datetime(
         1970, 1, 1, 0, 0, 1, tzinfo=tz.tzutc())
Example #30
 def setUp(self):
     self.region = 'us-west-2'
     self.session = Session()
     self.session.set_config_variable('region', self.region)
     self.request_serializer = s3transfer.crt.BotocoreCRTRequestSerializer(
         self.session)
     self.bucket = "test_bucket"
     self.key = "test_key"
     self.files = FileCreator()
     self.filename = self.files.create_file('myfile', 'my content')
     self.expected_path = "/" + self.bucket + "/" + self.key
     self.expected_host = "s3.%s.amazonaws.com" % (self.region)
Example #31
def write_image_to_s3(img):
    print("writing image to s3")
    session = Session()
    s3 = session.create_client('s3')
    file_name = 'DeepLens/image-'+time.strftime("%Y%m%d-%H%M%S")+'.jpg'
    # You can control the size and quality of the image
    encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
    _, jpg_data = cv2.imencode('.jpg', img, encode_param)
    response = s3.put_object(ACL='public-read', Body=jpg_data.tostring(),
                             Bucket='rekog-face', Key=file_name)

    image_url = 'https://s3.amazonaws.com/<BUCKET_NAME>/'+file_name
    return image_url
Example #32
def test_public_apis_will_not_be_signed():
    session = Session()

    # Mimic the scenario that user does not have aws credentials setup
    session.get_credentials = mock.Mock(return_value=None)

    for service_name in PUBLIC_API_TESTS:
        client = session.create_client(service_name, REGIONS[service_name])
        for operation_name in PUBLIC_API_TESTS[service_name]:
            kwargs = PUBLIC_API_TESTS[service_name][operation_name]
            method = getattr(client, xform_name(operation_name))
            yield _test_public_apis_will_not_be_signed, client, method, kwargs
Example #33
 def test_tagged_union_member_name_does_not_coincide_with_unknown_key(self):
     # This test ensures that operation models do not use SDK_UNKNOWN_MEMBER
     # as a member name. Thereby reserving SDK_UNKNOWN_MEMBER for the parser to
     # set as a key on the response object. This is necessary when the client
     # encounters a member that it is unaware of or that is not modeled.
     session = Session()
     for service_name in session.get_available_services():
         service_model = session.get_service_model(service_name)
         for shape_name in service_model.shape_names:
             shape = service_model.shape_for(shape_name)
             if hasattr(shape, 'is_tagged_union') and shape.is_tagged_union:
                 self.assertNotIn('SDK_UNKNOWN_MEMBER', shape.members)
Example #34
    async def build_region_list(self, service: str, chosen_regions=None, partition_name='aws'):
        service = 'ec2containerservice' if service == 'ecs' else service
        available_services = await run_concurrently(lambda: Session().get_available_services())

        if service not in available_services:
            raise Exception('Service ' + service + ' is not available.')

        regions = await run_concurrently(lambda: Session().get_available_regions(service, partition_name))

        if chosen_regions:
            return list((Counter(regions) & Counter(chosen_regions)).elements())
        else:
            return regions
Example #35
def write_image_to_s3(img):
    session = Session()
    s3 = session.create_client('s3')
    file_name = 'DeepLens/image-' + time.strftime("%Y%m%d-%H%M%S") + '.jpg'
    # You can control the size and quality of the image
    encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 100]
    _, jpg_data = cv2.imencode('.jpg', img, encode_param)
    response = s3.put_object(
        ACL='public-read',
        Body=jpg_data.tostring(),
        Bucket='deeplens-sagemaker-f2ede581-36fb-41ce-b61c-881bb384ee61',
        Key=file_name)
    image_url = 'https://s3.amazonaws.com/deeplens-sagemaker-f2ede581-36fb-41ce-b61c-881bb384ee61/' + file_name
    return image_url
Example #36
 def setUp(self):
     self.global_args = mock.Mock()
     self._session = Session()
     self.sso_client = self._session.create_client(
         'sso',
         region_name='us-west-2',
     )
     self.sso_stub = Stubber(self.sso_client)
     self.profile = 'a-profile'
     self.scoped_config = {}
     self.full_config = {
         'profiles': {
             self.profile: self.scoped_config
         }
     }
     self.mock_session = mock.Mock(spec=Session)
     self.mock_session.get_scoped_config.return_value = self.scoped_config
     self.mock_session.full_config = self.full_config
     self.mock_session.create_client.return_value = self.sso_client
     self.mock_session.profile = self.profile
     self.config_path = '/some/path'
     self.session_config = {
         'config_file': self.config_path,
     }
     self.mock_session.get_config_variable = self.session_config.get
     self.mock_session.get_available_regions.return_value = ['us-east-1']
     self.token_cache = {}
     self.writer = mock.Mock(spec=ConfigFileWriter)
     self.prompter = mock.Mock(spec=PTKPrompt)
     self.selector = mock.Mock(spec=select_menu)
     self.configure_sso = ConfigureSSOCommand(
         self.mock_session,
         prompter=self.prompter,
         selector=self.selector,
         config_writer=self.writer,
         sso_token_cache=self.token_cache,
     )
     self.region = 'us-west-2'
     self.output = 'json'
     self.sso_region = 'us-east-1'
     self.start_url = 'https://d-92671207e4.awsapps.com/start'
     self.account_id = '0123456789'
     self.role_name = 'roleA'
     self.cached_token_key = '13f9d35043871d073ab260e020f0ffde092cb14b'
     self.expires_at = datetime.now(tzlocal()) + timedelta(hours=24)
     self.access_token = {
         'accessToken': 'access.token.string',
         'expiresAt': self.expires_at,
     }
     self.token_cache[self.cached_token_key] = self.access_token
Example #37
    def setUp(self):
        self.env_original = os.environ.copy()
        self.environ_copy = os.environ.copy()
        super(TestAssumeRoleCredentials, self).setUp()
        os.environ = self.environ_copy
        # The tests rely on manipulating AWS_CONFIG_FILE,
        # but we also need to make sure we don't accidentally
        # pick up the ~/.aws/credentials file either.
        os.environ['AWS_SHARED_CREDENTIALS_FILE'] = str(uuid4())
        self.parent_session = Session()
        self.iam = self.parent_session.create_client('iam')
        self.sts = self.parent_session.create_client('sts')
        self.tempdir = tempfile.mkdtemp()
        self.config_file = os.path.join(self.tempdir, 'config')

        # A role trust policy that allows the current account to call assume
        # role on itself.
        account_id = self.sts.get_caller_identity()['Account']
        self.role_policy = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Principal": {
                        "AWS": "arn:aws:iam::%s:root" % account_id
                    },
                    "Action": "sts:AssumeRole"
                }
            ]
        }
Example #38
 def setUp(self):
     self.environ = {
         'AWS_DATA_PATH': os.environ['AWS_DATA_PATH'],
         'AWS_DEFAULT_REGION': 'us-east-1',
         'AWS_ACCESS_KEY_ID': 'access_key',
         'AWS_SECRET_ACCESS_KEY': 'secret_key',
         'AWS_CONFIG_FILE': '',
     }
     self.environ_patch = mock.patch('os.environ', self.environ)
     self.environ_patch.start()
     emitter = HierarchicalEmitter()
     session = Session(EnvironmentVariables, emitter)
     session.register_component('data_loader', _LOADER)
     load_plugins({}, event_hooks=emitter)
     driver = CLIDriver(session=session)
     self.session = session
     self.driver = driver
Example #39
    def create_session(self, profile=None):
        session = Session(profile=profile)

        # We have to set bogus credentials here or otherwise we'll trigger
        # an early credential chain resolution.
        sts = session.create_client(
            'sts',
            aws_access_key_id='spam',
            aws_secret_access_key='eggs',
        )
        stubber = Stubber(sts)
        stubber.activate()
        assume_role_provider = AssumeRoleProvider(
            load_config=lambda: session.full_config,
            client_creator=lambda *args, **kwargs: sts,
            cache={},
            profile_name=profile,
            credential_sourcer=CanonicalNameCredentialSourcer([
                self.env_provider, self.container_provider,
                self.metadata_provider
            ])
        )

        component_name = 'credential_provider'
        resolver = session.get_component(component_name)
        available_methods = [p.METHOD for p in resolver.providers]
        replacements = {
            'env': self.env_provider,
            'iam-role': self.metadata_provider,
            'container-role': self.container_provider,
            'assume-role': assume_role_provider
        }
        for name, provider in replacements.items():
            try:
                index = available_methods.index(name)
            except ValueError:
                # The provider isn't in the session
                continue

            resolver.providers[index] = provider

        session.register_component(
            'credential_provider', resolver
        )
        return session, stubber
Example #40
class TestResetStreamOnRetry(unittest.TestCase):
    def setUp(self):
        super(TestResetStreamOnRetry, self).setUp()
        self.total_calls = 0
        self.auth = Mock()
        self.session = Session(include_builtin_handlers=False)
        self.service = Mock()
        self.service.endpoint_prefix = 's3'
        self.service.session = self.session
        self.endpoint = RestEndpoint(
            self.service, 'us-east-1', 'https://s3.amazonaws.com/',
            auth=self.auth)
        self.http_session = Mock()
        self.endpoint.http_session = self.http_session
        self.get_response_patch = patch('botocore.response.get_response')
        self.get_response = self.get_response_patch.start()
        self.retried_on_exception = None

    def tearDown(self):
        self.get_response_patch.stop()

    def max_attempts_retry_handler(self, attempts, **kwargs):
        # Simulate a max requests of 3.
        self.total_calls += 1
        if attempts == 3:
            return None
        else:
            # Returning anything non-None will trigger a retry,
            # but 0 here is so that time.sleep(0) happens.
            return 0

    def test_reset_stream_on_retry(self):
        # It doesn't really matter what the operation is, we will
        # check in general that the stream gets reset on each retry.
        self.session.register('needs-retry.s3.PutObject',
                              self.max_attempts_retry_handler)
        op = Mock()
        payload = Payload()
        payload.literal_value = RecordStreamResets('foobar')
        op.name = 'PutObject'
        op.http = {'uri': '', 'method': 'POST'}
        self.endpoint.make_request(op, {'headers': {}, 'payload': payload})
        self.assertEqual(self.total_calls, 3)
        self.assertEqual(payload.literal_value.total_resets, 2)
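The same handler protocol still applies to a present-day botocore Session: a needs-retry handler that returns a number is treated as the sleep time in seconds before retrying, and returning None stops retries. A minimal sketch (the service and operation are arbitrary choices):

from botocore.session import Session

def retry_twice(attempts, **kwargs):
    # Retry immediately (sleep 0 seconds) until two attempts have been made.
    if attempts >= 2:
        return None  # None means "do not retry"
    return 0         # any non-None value is the delay before the next attempt

session = Session()
session.register('needs-retry.s3.PutObject', retry_twice)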
Example #41
 def factory(context, request):
     """
     :type context: object
     :type request: pyramid.request.Request
     :rtype: boto3.Session
     """
     session = None
     if cache is not None:
         session = getattr(cache, session_name, None)
     if session is None:
         core_session = None
         if core_settings:
             core_session = CoreSession()
             for k, v in core_settings.items():
                 core_session.set_config_variable(k, v)
         session = Session(botocore_session=core_session, **settings)
         if cache is not None:
             setattr(cache, session_name, session)
     return session
Example #42
def create_botocore_session(profile=None, debug=False,
                            connection_timeout=None,
                            read_timeout=None,
                            max_retries=None):
    # type: (OptStr, bool, OptInt, OptInt, OptInt) -> Session
    s = Session(profile=profile)
    _add_chalice_user_agent(s)
    if debug:
        _inject_large_request_body_filter()
    config_args = {}  # type: Dict[str, Any]
    if connection_timeout is not None:
        config_args['connect_timeout'] = connection_timeout
    if read_timeout is not None:
        config_args['read_timeout'] = read_timeout
    if max_retries is not None:
        config_args['retries'] = {'max_attempts': max_retries}
    if config_args:
        config = BotocoreConfig(**config_args)
        s.set_default_client_config(config)
    return s
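A possible call site for the helper above (a sketch; the profile name, region, and timeout values are illustrative assumptions):

session = create_botocore_session(profile='dev', debug=False,
                                  connection_timeout=5, read_timeout=60,
                                  max_retries=2)
lambda_client = session.create_client('lambda', region_name='us-east-1')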
Example #43
    def test_assume_role_uses_correct_region(self):
        config = (
            '[profile A]\n'
            'role_arn = arn:aws:iam::123456789:role/RoleA\n'
            'source_profile = B\n\n'
            '[profile B]\n'
            'aws_access_key_id = abc123\n'
            'aws_secret_access_key = def456\n'
        )
        self.write_config(config)
        session = Session(profile='A')
        # Verify that when we configure the session with a specific region
        # that we use that region when creating the sts client.
        session.set_config_variable('region', 'cn-north-1')

        create_client, expected_creds = self.create_stubbed_sts_client(session)
        session.create_client = create_client

        resolver = create_credential_resolver(session)
        provider = resolver.get_provider('assume-role')
        creds = provider.load()
        self.assert_creds_equal(creds, expected_creds)
        self.assertEqual(self.actual_client_region, 'cn-north-1')
Example #44
 def setUp(self):
     super(TestRetryInterface, self).setUp()
     self.total_calls = 0
     self.auth = Mock()
     self.session = Session(include_builtin_handlers=False)
     self.service = Mock()
     self.service.endpoint_prefix = "ec2"
     self.service.session = self.session
     self.endpoint = QueryEndpoint(self.service, "us-west-2", "https://ec2.us-west-2.amazonaws.com/", auth=self.auth)
     self.http_session = Mock()
     self.endpoint.http_session = self.http_session
     self.get_response_patch = patch("botocore.response.get_response")
     self.get_response = self.get_response_patch.start()
     self.retried_on_exception = None
Example #45
 def setUp(self):
     super(TestResetStreamOnRetry, self).setUp()
     self.total_calls = 0
     self.auth = Mock()
     self.session = Session(include_builtin_handlers=False)
     self.service = Mock()
     self.service.endpoint_prefix = 's3'
     self.service.session = self.session
     self.endpoint = RestEndpoint(
         self.service, 'us-east-1', 'https://s3.amazonaws.com/',
         auth=self.auth)
     self.http_session = Mock()
     self.endpoint.http_session = self.http_session
     self.get_response_patch = patch('botocore.response.get_response')
     self.get_response = self.get_response_patch.start()
     self.retried_on_exception = None
Example #46
    def setUp(self):
        super(TestAssumeRoleCredentials, self).setUp()
        self.environ = os.environ.copy()
        self.parent_session = Session()
        self.iam = self.parent_session.create_client('iam')
        self.sts = self.parent_session.create_client('sts')
        self.tempdir = tempfile.mkdtemp()
        self.config_file = os.path.join(self.tempdir, 'config')

        # A role trust policy that allows the current account to call assume
        # role on itself.
        account_id = self.sts.get_caller_identity()['Account']
        self.role_policy = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Principal": {
                        "AWS": "arn:aws:iam::%s:root" % account_id
                    },
                    "Action": "sts:AssumeRole"
                }
            ]
        }
Example #47
    def getEnvironment(self, profile=None):
        """Return environment variables that should be set for the profile."""
        eventHooks = HierarchicalEmitter()
        session = Session(event_hooks=eventHooks)

        if profile:
            session.set_config_variable('profile', profile)

        eventHooks.register('session-initialized',
                            inject_assume_role_provider_cache,
                            unique_id='inject_assume_role_cred_provider_cache')

        session.emit('session-initialized', session=session)
        creds = session.get_credentials()

        env = {}

        def set(key, value):
            if value:
                env[key] = value

        set('AWS_ACCESS_KEY_ID', creds.access_key)
        set('AWS_SECRET_ACCESS_KEY', creds.secret_key)

        # AWS_SESSION_TOKEN is ostensibly the standard:
        # http://blogs.aws.amazon.com/security/post/Tx3D6U6WSFGOK2H/A-New-and-Standardized-Way-to-Manage-Credentials-in-the-AWS-SDKs
        # http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html#cli-environment
        set('AWS_SESSION_TOKEN', creds.token)

        # ...but boto expects AWS_SECURITY_TOKEN. Set both for compatibility.
        # https://github.com/boto/boto/blob/b016c07d834df5bce75141c4b9d2f3d30352e1b8/boto/connection.py#L438
        set('AWS_SECURITY_TOKEN', creds.token)

        set('AWS_DEFAULT_REGION', session.get_config_variable('region'))

        return env
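A sketch of using the returned mapping, assuming the method lives on some helper object (the AssumeRoleEnvironment name here is hypothetical) and the goal is to emit shell export statements:

exporter = AssumeRoleEnvironment()  # hypothetical class that defines getEnvironment()
for key, value in exporter.getEnvironment(profile='dev').items():
    print('export %s=%s' % (key, value))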
Example #48
def _get_session():
    session = Session()
    session.set_credentials('foo', 'bar')
    session.set_config_variable('region', 'us-west-2')
    session.config_filename = 'no-exist-foo'
    return session
Example #49
class TestAssumeRoleCredentials(unittest.TestCase):
    def setUp(self):
        super(TestAssumeRoleCredentials, self).setUp()
        self.environ = os.environ.copy()
        self.parent_session = Session()
        self.iam = self.parent_session.create_client('iam')
        self.sts = self.parent_session.create_client('sts')
        self.tempdir = tempfile.mkdtemp()
        self.config_file = os.path.join(self.tempdir, 'config')

        # A role trust policy that allows the current account to call assume
        # role on itself.
        account_id = self.sts.get_caller_identity()['Account']
        self.role_policy = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Principal": {
                        "AWS": "arn:aws:iam::%s:root" % account_id
                    },
                    "Action": "sts:AssumeRole"
                }
            ]
        }

    def tearDown(self):
        super(TestAssumeRoleCredentials, self).tearDown()
        shutil.rmtree(self.tempdir)

    def random_name(self):
        return 'clitest-' + random_chars(10)

    def create_role(self, policy_document, policy_arn=None):
        name = self.random_name()
        response = self.iam.create_role(
            RoleName=name,
            AssumeRolePolicyDocument=json.dumps(policy_document)
        )
        self.addCleanup(self.iam.delete_role, RoleName=name)
        if policy_arn:
            self.iam.attach_role_policy(RoleName=name, PolicyArn=policy_arn)
            self.addCleanup(
                self.iam.detach_role_policy, RoleName=name,
                PolicyArn=policy_arn
            )
        return response['Role']

    def create_user(self, policy_arns):
        name = self.random_name()
        user = self.iam.create_user(UserName=name)['User']
        self.addCleanup(self.iam.delete_user, UserName=name)

        for arn in policy_arns:
            self.iam.attach_user_policy(
                UserName=name,
                PolicyArn=arn
            )
            self.addCleanup(
                self.iam.detach_user_policy,
                UserName=name, PolicyArn=arn
            )

        return user

    def create_creds(self, user_name):
        creds = self.iam.create_access_key(UserName=user_name)['AccessKey']
        self.addCleanup(
            self.iam.delete_access_key,
            UserName=user_name, AccessKeyId=creds['AccessKeyId']
        )
        return creds

    def wait_for_assume_role(self, role_arn, access_key, secret_key,
                             token=None, attempts=30, delay=10):
        # "Why not use the policy simulator?" you might ask. The answer is
        # that the policy simulator will return success far before you can
        # actually make the calls.
        client = self.parent_session.create_client(
            'sts', aws_access_key_id=access_key,
            aws_secret_access_key=secret_key, aws_session_token=token
        )
        attempts_remaining = attempts
        role_session_name = random_chars(10)
        while attempts_remaining > 0:
            attempts_remaining -= 1
            try:
                result = client.assume_role(
                    RoleArn=role_arn, RoleSessionName=role_session_name)
                return result['Credentials']
            except ClientError as e:
                code = e.response.get('Error', {}).get('Code')
                if code in ["InvalidClientTokenId", "AccessDenied"]:
                    time.sleep(delay)
                else:
                    raise

        raise Exception("Unable to assume role %s" % role_arn)

    def create_assume_policy(self, role_arn):
        policy_document = {
            "Version": "2012-10-17",
            "Statement": [
                {
                    "Effect": "Allow",
                    "Resource": role_arn,
                    "Action": "sts:AssumeRole"
                }
            ]
        }
        name = self.random_name()
        response = self.iam.create_policy(
            PolicyName=name,
            PolicyDocument=json.dumps(policy_document)
        )
        self.addCleanup(
            self.iam.delete_policy, PolicyArn=response['Policy']['Arn']
        )
        return response['Policy']['Arn']

    def assert_s3_read_only_profile(self, profile_name):
        # Calls to S3 should succeed
        command = 's3api list-buckets --profile %s' % profile_name
        result = aws(command, env_vars=self.environ)
        self.assertEqual(result.rc, 0, result.stderr)

        # Calls to other services should not
        command = 'iam list-groups --profile %s' % profile_name
        result = aws(command, env_vars=self.environ)
        self.assertNotEqual(result.rc, 0, result.stdout)
        self.assertIn('AccessDenied', result.stderr)

    def test_recursive_assume_role(self):
        # Create the final role, the one that will actually have access to s3
        final_role = self.create_role(self.role_policy, S3_READ_POLICY_ARN)

        # Create the role that can assume the final role
        middle_policy_arn = self.create_assume_policy(final_role['Arn'])
        middle_role = self.create_role(self.role_policy, middle_policy_arn)

        # Create a user that can only assume the middle-man role, and then get
        # static credentials for it.
        user_policy_arn = self.create_assume_policy(middle_role['Arn'])
        user = self.create_user([user_policy_arn])
        user_creds = self.create_creds(user['UserName'])

        # Setup the config file with the profiles we'll be using. For
        # convenience static credentials are placed here instead of putting
        # them in the credentials file.
        config = (
            '[default]\n'
            'aws_access_key_id = %s\n'
            'aws_secret_access_key = %s\n'
            '[profile middle]\n'
            'source_profile = default\n'
            'role_arn = %s\n'
            '[profile final]\n'
            'source_profile = middle\n'
            'role_arn = %s\n'
        )
        config = config % (
            user_creds['AccessKeyId'], user_creds['SecretAccessKey'],
            middle_role['Arn'], final_role['Arn']
        )
        with open(self.config_file, 'w') as f:
            f.write(config)

        # Wait for IAM permissions to propagate
        middle_creds = self.wait_for_assume_role(
            role_arn=middle_role['Arn'],
            access_key=user_creds['AccessKeyId'],
            secret_key=user_creds['SecretAccessKey'],
        )
        self.wait_for_assume_role(
            role_arn=final_role['Arn'],
            access_key=middle_creds['AccessKeyId'],
            secret_key=middle_creds['SecretAccessKey'],
            token=middle_creds['SessionToken'],
        )

        # Configure our credentials file to be THE credentials file
        self.environ['AWS_CONFIG_FILE'] = self.config_file

        self.assert_s3_read_only_profile(profile_name='final')

    def test_assume_role_with_credential_source(self):
        # Create a role with read access to S3
        role = self.create_role(self.role_policy, S3_READ_POLICY_ARN)

        # Create a user that can assume the role and get static credentials
        # for it.
        user_policy_arn = self.create_assume_policy(role['Arn'])
        user = self.create_user([user_policy_arn])
        user_creds = self.create_creds(user['UserName'])

        # Setup the config file with the profile we'll be using.
        config = (
            '[profile assume]\n'
            'role_arn = %s\n'
            'credential_source = Environment\n'
        )
        config = config % role['Arn']
        with open(self.config_file, 'w') as f:
            f.write(config)

        # Wait for IAM permissions to propagate
        self.wait_for_assume_role(
            role_arn=role['Arn'],
            access_key=user_creds['AccessKeyId'],
            secret_key=user_creds['SecretAccessKey'],
        )

        # Setup the environment so that our new config file is THE config
        # file and add the expected credentials since we're using the
        # environment as our credential source.
        self.environ['AWS_CONFIG_FILE'] = self.config_file
        self.environ['AWS_SECRET_ACCESS_KEY'] = user_creds['SecretAccessKey']
        self.environ['AWS_ACCESS_KEY_ID'] = user_creds['AccessKeyId']

        self.assert_s3_read_only_profile(profile_name='assume')
Example #50
 def profile_name(self, new_name):
     """set profile_name and refresh_boto_connections"""
     self._profile_name = new_name
     if new_name is not None:
         self.config = Session(profile=new_name).get_scoped_config()
     self.setup_session_and_refresh_connections()
Example #51
class BotoConnections(object):
    """Central Management of boto3 client and resource connection objects."""
    def __init__(self, region_name=None, profile_name=None):
        """
        Optionally pass region_name and profile_name. Setup boto3 session.
        Attach boto3 client and resource connection objects.
        """
        # defaults.
        self.config = {}
        self._region_name = self._profile_name = None
        # trigger region_name.setter
        self.region_name = region_name
        # trigger profile_name.setter
        self.profile_name = profile_name

    @property
    def profile_name(self):
        return self._profile_name

    @profile_name.setter
    def profile_name(self, new_name):
        """set profile_name and refresh_boto_connections"""
        self._profile_name = new_name
        if new_name is not None:
            self.config = Session(profile=new_name).get_scoped_config()
        self.setup_session_and_refresh_connections()

    @property
    def region_name(self):
        if self._region_name is None:
            return self.config.get('region', None)
        return self._region_name

    @region_name.setter
    def region_name(self, new_name):
        """set region_name and refresh_boto_connections"""
        self._region_name = new_name
        self.setup_session_and_refresh_connections()

    def setup_session_and_refresh_connections(self):
        if self.profile_name and self.region_name:
            boto3.setup_default_session(
              profile_name = self.profile_name,
              region_name  = self.region_name,
            )
        elif self.profile_name:
            boto3.setup_default_session(profile_name = self.profile_name)
        elif self.region_name:
            boto3.setup_default_session(region_name = self.region_name)
        else:
            return None
        self.refresh_boto_connections()

    def refresh_boto_connections(self):
        """Attach related Boto3 clients and resources."""
        self.iam = boto3.resource('iam')
        self.ec2 = boto3.resource('ec2')
        self.ec2_client = boto3.client('ec2')
        self.rds = boto3.client('rds')
        self.elasticache = boto3.client('elasticache')
        self.elb = boto3.client('elb')
        self.autoscaling = boto3.client('autoscaling')
        self.route53 = boto3.client('route53')
        
    @property
    def azones(self):
        """Return a list of available AZ names for active AWS profile/region."""
        az_filter = make_filter('state', 'available')
        azs = self.ec2_client.describe_availability_zones(Filters=az_filter)
        return [az['ZoneName'] for az in azs['AvailabilityZones']]
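Example use of the class (a sketch with placeholder profile and region values; it assumes valid AWS credentials for that profile):

conn = BotoConnections(region_name='us-east-1', profile_name='default')
print(list(conn.azones))  # e.g. ['us-east-1a', 'us-east-1b', ...]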
Example #52
class TestRetryInterface(BaseSessionTest):
    def setUp(self):
        super(TestRetryInterface, self).setUp()
        self.total_calls = 0
        self.auth = Mock()
        self.session = Session(include_builtin_handlers=False)
        self.service = Mock()
        self.service.endpoint_prefix = 'ec2'
        self.service.session = self.session
        self.endpoint = QueryEndpoint(
            self.service, 'us-west-2', 'https://ec2.us-west-2.amazonaws.com/',
            auth=self.auth)
        self.http_session = Mock()
        self.endpoint.http_session = self.http_session
        self.get_response_patch = patch('botocore.response.get_response')
        self.get_response = self.get_response_patch.start()
        self.retried_on_exception = None

    def tearDown(self):
        self.get_response_patch.stop()

    def max_attempts_retry_handler(self, attempts, **kwargs):
        # Simulate a max requests of 3.
        self.total_calls += 1
        if attempts == 3:
            return None
        else:
            # Returning anything non-None will trigger a retry,
            # but 0 here is so that time.sleep(0) happens.
            return 0

    def connection_error_handler(self, attempts, caught_exception, **kwargs):
        self.total_calls += 1
        if attempts == 3:
            return None
        elif isinstance(caught_exception, ConnectionError):
            # Returning anything non-None will trigger a retry,
            # but 0 here is so that time.sleep(0) happens.
            return 0
        else:
            return None

    def test_retry_events_are_emitted(self):
        emitted_events = []
        self.session.register('needs-retry.ec2.DescribeInstances',
                              lambda **kwargs: emitted_events.append(kwargs))
        op = Mock()
        op.name = 'DescribeInstances'
        self.endpoint.make_request(op, {})
        self.assertEqual(len(emitted_events), 1)
        self.assertEqual(emitted_events[0]['event_name'],
                         'needs-retry.ec2.DescribeInstances')

    def test_retry_events_can_alter_behavior(self):
        self.session.register('needs-retry.ec2.DescribeInstances',
                              self.max_attempts_retry_handler)
        op = Mock()
        op.name = 'DescribeInstances'
        self.endpoint.make_request(op, {})
        self.assertEqual(self.total_calls, 3)

    def test_retry_on_socket_errors(self):
        self.session.register('needs-retry.ec2.DescribeInstances',
                              self.connection_error_handler)
        op = Mock()
        op.name = 'DescribeInstances'
        self.http_session.send.side_effect = ConnectionError()
        self.endpoint.make_request(op, {})
        self.assertEqual(self.total_calls, 3)