def session(self):
    # Only build the session once; serialise concurrent callers on the lock.
    with self._acquire_session:
        if not self._session:
            session = None
            if self.resource.mfa_serial:
                # MFA sessions are cached per (access key, MFA serial) pair and
                # only refreshed when the cached token has expired.
                cache_key = '_'.join((self.resource.access_key_id, self.resource.mfa_serial))
                if cache_key in self.cache:
                    session = Session.fromjson(self.cache[cache_key])
                if not session or session.expiration <= now():
                    creds = self.base_client.get_session_token(
                        SerialNumber=self.resource.mfa_serial,
                        TokenCode=self.ui.prompt(
                            'Please enter a token for MFA device {}'.format(self.resource.mfa_serial),
                            key=self.resource.mfa_serial,
                        ),
                    )['Credentials']
                    session = Session(
                        access_key_id=creds['AccessKeyId'],
                        secret_access_key=creds['SecretAccessKey'],
                        session_token=creds['SessionToken'],
                        expiration=creds['Expiration'],
                        region=self.resource.region,
                    )
                    self.cache[cache_key] = session.tojson()
            else:
                # No MFA device configured, so the base session can be used directly.
                session = self.base_session
            self._session = session
            self._session.region = self.resource.region
    return self._session
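# A minimal sketch, assuming a simple value object: this is NOT the library's
# actual Session class, just an illustration of the interface that session()
# above relies on -- tojson()/fromjson() round-tripping for the cache, plus an
# `expiration` attribute comparable against a timezone-aware now(). All names
# here (SessionSketch, _now) are hypothetical.
import datetime
import json


def _now():
    # Hypothetical stand-in for the now() helper used above.
    return datetime.datetime.now(datetime.timezone.utc)


class SessionSketch(object):

    def __init__(self, access_key_id, secret_access_key, session_token,
                 expiration, region=None):
        self.access_key_id = access_key_id
        self.secret_access_key = secret_access_key
        self.session_token = session_token
        self.expiration = expiration
        self.region = region

    def tojson(self):
        # Serialise the expiration as ISO 8601 so the cache entry is plain text.
        payload = dict(vars(self))
        payload['expiration'] = self.expiration.isoformat()
        return json.dumps(payload)

    @classmethod
    def fromjson(cls, blob):
        payload = json.loads(blob)
        payload['expiration'] = datetime.datetime.fromisoformat(payload['expiration'])
        return cls(**payload)

    @property
    def expired(self):
        return self.expiration <= _now()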
def test_tail(self):
    goal = self.create_goal('tail')
    bucket = self.aws.get_bucket(name='my-log-bucket')
    distribution = self.fixtures.enter_context(DistributionStubber(
        goal.get_service(
            self.aws.get_distribution(
                name='www.example.com',
                logging={
                    'enabled': True,
                    'bucket': bucket,
                    'prefix': '/my/prefix',
                },
            ),
            'tail',
        )
    ))

    distribution.add_response(
        'list_objects',
        service_response={
            'Contents': [{
                'Key': 'log-chunk-1.gz',
                'LastModified': now(),
            }]
        },
        expected_params={
            'Bucket': 'my-log-bucket',
            'Prefix': '/my/prefix',
        }
    )

    buf = six.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as f:
        f.write(b'line1\nline2\nline3\n')
    log_chunk = buf.getvalue()

    distribution.add_response(
        'get_object',
        service_response={
            'Body': StreamingBody(
                six.BytesIO(log_chunk),
                len(log_chunk),
            ),
        },
        expected_params={
            'Bucket': 'my-log-bucket',
            'Key': 'log-chunk-1.gz',
        }
    )

    goal.execute('www.example.com', None, None, False)
def add_get_session_token(self):
    return self.add_response(
        'get_session_token',
        service_response={
            'Credentials': {
                'AccessKeyId': 'AKIMFAGETSESSIONMFAGETSESSION',
                'SecretAccessKey': 'abcdefghijklmnopqrstuvwxyzmfa',
                'SessionToken': 'zyxwvutsrqpnomlkjihgfedcbamfa',
                'Expiration': now(),
            },
        },
        expected_params={
            'SerialNumber': 'mymfaserial',
            'TokenCode': '123456',
        }
    )
def check(self, target):
    self.old_db_name = '{}-{:%Y%m%d%H%M%S}'.format(self.resource.name, now())

    self.db = self.get_database(self.resource.name)
    if not self.db:
        raise errors.Error('Database {} not found?'.format(self.resource.name))

    if self.get_database(self.old_db_name):
        raise errors.Error('Database {} already exists - restore in progress?'.format(self.old_db_name))

    # Try to treat the target as a point-in-time timestamp first; if it does
    # not parse, fall back to treating it as a snapshot identifier.
    self.datetime_target = None
    try:
        self.datetime_target = parse_datetime(target)
        self.check_point_in_time(self.db, self.datetime_target)
    except errors.Error:
        self.check_snapshot(self.db, target)
def test_tail_once(self):
    goal = self.create_goal('tail')
    log_group = self.fixtures.enter_context(LogGroupStubber(
        goal.get_service(
            self.aws.add_log_group(
                name='test-log_group',
            ),
            'tail',
        )
    ))

    log_group.add_response(
        'filter_log_events',
        service_response={
            'events': [{
                'eventId': 'EV1',
                'timestamp': 0,
                'logStreamName': 'logstream1',
                'message': 'this is my message',
            }],
            'nextToken': 'nextToken1',
        },
        expected_params={
            'logGroupName': log_group.resource.name,
            'startTime': ANY,
            'endTime': ANY,
        }
    )

    log_group.add_response(
        'filter_log_events',
        service_response={
            'events': [{
                'eventId': 'EV1',
                'timestamp': 0,
                'logStreamName': 'logstream1',
                'message': 'this is my message',
            }, {
                'eventId': 'EV2',
                'timestamp': 0,
                'logStreamName': 'logstream1',
                'message': 'this is another message',
            }],
        },
        expected_params={
            'logGroupName': log_group.resource.name,
            'nextToken': 'nextToken1',
            'startTime': ANY,
            'endTime': ANY,
        }
    )

    echo = self.fixtures.enter_context(mock.patch.object(goal.ui, 'echo'))

    # Call with both a naive and a non-naive datetime. The values are ignored
    # but will at least exercise the datetime handling codepaths.
    goal.execute(
        'test-log_group',
        start=datetime.datetime.now(),
        end=now(),
    )

    # Assert that pagination and de-duplication both work: EV1 appears on both
    # pages but should only be echoed once.
    echo.assert_has_calls([
        mock.call('[1970-01-01 00:00:00] [logstream1] this is my message'),
        mock.call('[1970-01-01 00:00:00] [logstream1] this is another message'),
    ])
def is_stale(self, server_certificate):
    if server_certificate['Expiration'] >= datetime.now():
        # Don't delete valid certificates
        return False
    return super(Apply, self).is_stale(server_certificate)