def iris_infrastructure():
    """Fixture: a mocked SNS topic subscribed to a mocked SQS queue.

    Yields a dict exposing the boto3 resources, topic/queue objects and
    their ARNs so tests can publish to the topic and read off the queue.
    """
    # Bug fix: keep references to the started mocks. The original teardown
    # called mock_sns().stop() / mock_sqs().stop(), which constructs *new*
    # mock objects and stops those, leaving the started mocks untouched
    # (and raising on moto versions with per-instance state).
    sqs_mock = mock_sqs()
    sqs_mock.start()
    sns_mock = mock_sns()
    sns_mock.start()
    sns_res = boto3.resource('sns', region_name=test_iris_settings.AWS_REGION)
    sqs_res = boto3.resource('sqs', region_name=test_iris_settings.AWS_REGION)
    topic = sns_res.create_topic(Name=test_iris_settings.IRIS_SNS_TOPIC)
    queue = sqs_res.create_queue(
        QueueName=test_iris_settings.IRIS_SQS_APP_QUEUE)
    queue_arn = queue.attributes['QueueArn']
    topic_arn = topic.arn
    sns_res.meta.client.subscribe(TopicArn=topic_arn,
                                  Protocol='sqs',
                                  Endpoint=queue_arn)
    yield {
        'sns_res': sns_res,
        'sqs_res': sqs_res,
        'topic': topic,
        'queue': queue,
        'topic_arn': topic_arn,
        'queue_arn': queue_arn
    }
    sns_mock.stop()
    sqs_mock.stop()
def initialize_environment_smoke_test(discover_source_buckets_smoke_test):
    """Fixture: mocked SQS queue plus S3 inventory objects for the smoke test.

    Yields the upstream discover_source_buckets_smoke_test fixture value.
    """
    # Bug fix: keep a reference to the started mock. The original teardown
    # called mock_sqs().stop(), which stops a brand-new mock instead of the
    # one started here.
    sqs_mock = mock_sqs()
    sqs_mock.start()
    queue_arn_parts = config.DeploymentDetails.sqs_arn.split(sep=':')
    # The queue name is the last ARN component; negative indexing replaces
    # the original len()-1 arithmetic.
    queue_name = queue_arn_parts[-1]
    sqs_client = awshelper.get_client(awshelper.ServiceName.sqs)
    sqs_client.create_queue(QueueName=queue_name)
    s3_client = awshelper.get_client(awshelper.ServiceName.s3)
    s3_client.create_bucket(
        Bucket=TestValues.regional_inventory_destination_bucket_name,
        CreateBucketConfiguration={
            'LocationConstraint': 'us-west-2'})
    body = bytes('hello world', 'utf-8')
    s3_client.put_object(
        Bucket=TestValues.regional_inventory_destination_bucket_name,
        Key=TestValues.sample_inventory_object_key,
        Body=body)
    s3_client.put_object(
        Bucket=TestValues.regional_inventory_destination_bucket_name,
        Key=TestValues.sample_inventory_manifest_key,
        Body=body)
    yield discover_source_buckets_smoke_test
    sqs_mock.stop()
def queue():
    """
    Return a mocked SQS Queue object, save its URL to an environment variable.
    """
    # Bug fix: keep a reference to the started mock. The original teardown
    # called mock_sqs().stop(), which stops a fresh, never-started mock and
    # leaves the real one running.
    sqs_mock = mock_sqs()
    sqs_mock.start()
    queue_url = boto3.client('sqs').create_queue(QueueName='test')['QueueUrl']
    os.environ['SQS_URL'] = queue_url
    yield boto3.resource('sqs').Queue(queue_url)
    sqs_mock.stop()
def moto_start(set_region):
    """Fixture: start moto mocks for all AWS services used by the tests.

    Stops every mock on teardown.
    """
    # Bug fix: keep references to the started mocks. The original teardown
    # called e.g. mock_ec2().stop(), constructing new mock objects and
    # stopping those rather than the ones started here.
    mocks = [mock_autoscaling(), mock_ec2(), mock_ecs(), mock_sns(), mock_sqs()]
    for service_mock in mocks:
        service_mock.start()
    yield
    for service_mock in mocks:
        service_mock.stop()
def mock_aws_configs():
    """Start a moto SQS mock, create 'test-queue' pre-loaded with one sample
    message, and return the region, queue URL and a jq printer expression.

    NOTE(review): the mock is started but its reference is discarded without
    a stop() — SQS stays mocked for the remainder of the process. Confirm
    this is intentional for the test session.
    """
    mock = mock_sqs()
    mock.start()
    region = "ap-southeast-2"
    sqs = boto3.resource("sqs", region_name=region)  # pylint: disable=maybe-no-member
    queue = sqs.create_queue(QueueName="test-queue")
    # The body mimics an SNS-wrapped S3 ObjectCreated:Put notification: the
    # inner S3 event is a JSON string embedded in the Sns.Message field.
    queue.send_message(
        MessageBody="""{ "Records": [{ "Sns": { "Message": "{\\"Records\\":[{\\"eventVersion\\":\\"2.1\\",\\"eventSource\\":\\"aws:s3\\",\\"awsRegion\\":\\"ap-southeast-2\\",\\"eventTime\\":\\"2019-09-03T14:38:13.181Z\\",\\"eventName\\":\\"ObjectCreated:Put\\",\\"userIdentity\\":{\\"principalId\\":\\"AWS:AROAJ2EX5FOYCOFZZBQMI:projector\\"},\\"requestParameters\\":{\\"sourceIPAddress\\":\\"13.239.35.38\\"},\\"responseElements\\":{\\"x-amz-request-id\\":\\"180C8AB8E33CF7EA\\",\\"x-amz-id-2\\":\\"FhU4Sb2ZhYiSEW+5MKqUU+EYax31OZS8Lf6AtbIOMSyuWeknhTNMHzCTF8QbWCSe8/84tWqH+JM=\\"},\\"s3\\":{\\"s3SchemaVersion\\":\\"1.0\\",\\"configurationId\\":\\"27b8c781-fe54-42e9-8f68-49ae10a62845\\",\\"bucket\\":{\\"name\\":\\"seek-apply-projections-prod\\",\\"ownerIdentity\\":{\\"principalId\\":\\"AVJWBGAX8UAFM\\"},\\"arn\\":\\"arn:aws:s3:::my-app-bucket\\"},\\"object\\":{\\"key\\":\\"822167373.json\\",\\"size\\":6039,\\"eTag\\":\\"1b8938b6028c1bceb39fb0ad4d2c3b26\\",\\"versionId\\":\\"tZW5WAAtsYSmcu1DA0tDX5r_LPrPxcEg\\",\\"sequencer\\":\\"005D6E7AD523036FB8\\"}}}]}" } }] }""",
        MessageAttributes={
            "ErrorMessage": {
                "StringValue": "RequestId: 393e79a4-cee5-423f-8273-8ea10f1a1fc6 Process exited before completing request",
                "DataType": "String",
            }
        },
    )
    # 'printer' is a jq program: extracts the S3 object key from the nested
    # JSON plus the ErrorMessage attribute, joined as TSV.
    return {
        "region": region,
        "queue_url": queue.url,
        "printer": "[(.Body | fromjson | .Records[].Sns.Message | fromjson | .Records[].s3.object.key), .MessageAttributes.ErrorMessage.StringValue] | @tsv",
    }
def start_mocking(self):
    """Flag mocking as active and start the moto S3 and SQS mocks."""
    self.mock = True
    self.mock_s3 = mock_s3()
    self.mock_s3.start()
    self.mock_sqs = mock_sqs()
    self.mock_sqs.start()
def delete_queue(self, queue_name):
    """Delete a test SQS queue, under a moto mock when mocking is enabled."""
    if not self.mock:
        self._delete_queue(queue_name)
        return
    with mock_sqs():
        self._delete_queue(queue_name)
def setUp(self):
    """Start the AWS service mocks and create the buckets/queues used by tests.

    When running against the 'local' deployment stage, STS is mocked out and
    the database config points at a local Postgres instance.
    """
    super().setUp()
    # Setup mock AWS
    self.s3_mock = mock_s3()
    self.s3_mock.start()
    self.iam_mock = mock_iam()
    self.iam_mock.start()
    self.sqs_mock = mock_sqs()
    self.sqs_mock.start()
    if self.deployment_stage == 'local':
        # When online, we need STS to access SecretsManager to access RDS.
        # When offline, mock out STS/SecretsManager and use local Postgres.
        # STS
        self.sts_mock = mock_sts()
        self.sts_mock.start()
        # UploadDbConfig
        self.upload_db_config = UploadDbConfig()
        self.upload_db_config.set({
            'database_uri': 'postgresql://:@localhost/upload_local',
            'pgbouncer_uri': 'postgresql://:@localhost/upload_local'
        })
    # Upload Bucket
    self.upload_bucket = boto3.resource('s3').Bucket(
        self.upload_config.bucket_name)
    self.upload_bucket.create()
    self.sqs = boto3.resource('sqs')
    # Plain strings: the original used f-string prefixes with no placeholders.
    self.sqs.create_queue(QueueName="bogo_url")
    self.sqs.create_queue(QueueName="delete_sqs_url")
    self.sqs.create_queue(QueueName="test_validation_q_url")
def queue_url():
    """Yield the URL of a freshly created mocked SQS queue; delete it after."""
    with mock_sqs():
        sqs_client = boto3.client('sqs')
        created = sqs_client.create_queue(QueueName='TestQueue')
        logger.info('%r', created)
        url = created['QueueUrl']
        yield url
        sqs_client.delete_queue(QueueUrl=url)
def game_queue(aws_credentials):
    """Yield an SQSGameQueueAdapter backed by a mocked SQS queue."""
    with mock_sqs():
        # Import deferred so the adapter module sees the active mock.
        from common.adapters.sqs_game_queue import SQSGameQueueAdapter
        queue_name = "TestQueue"
        boto3.resource('sqs', 'ca-central-1').create_queue(QueueName=queue_name)
        yield SQSGameQueueAdapter(queue_name)
def test_context_manager_returns_mock():
    """mock_sqs as a context manager should expose the moto backend object."""
    with mock_sqs() as sqs_mock:
        client = boto3.client("sqs", region_name="us-west-1")
        client.create_queue(QueueName="queue1")
        if settings.TEST_SERVER_MODE:
            # Backends are not inspectable when talking to a moto server.
            return
        backend = sqs_mock.backends["us-west-1"]
        list(backend.queues.keys()).should.equal(["queue1"])
def test_sqsfeedstorage_and_sqsexporter():
    """End-to-end: export example items through SQSExporter, store the deck
    via SQSFeedStorage, and verify the messages land on the mocked queue."""
    with mock_sqs():
        sqs = boto3.resource('sqs', region_name='eu-central-1')
        queue = sqs.create_queue(QueueName='bar')
        storage = sqsfeedexport.SQSFeedStorage('sqs://bar')
        assert storage.queue_name == 'bar'
        deck = storage.open()
        exporter = sqsfeedexport.SQSExporter(deck)
        # do the `scrapy.extensions.feedexport.FeedExporter` song and dance
        exporter.start_exporting()
        for item in examples:
            exporter.export_item(item)
        exporter.finish_exporting()
        assert len(deck) == 6
        # call the private method directly to avoid the deferToThread call
        storage._store_in_thread(deck)
        # now check what we've got
        messages = queue.receive_messages(MaxNumberOfMessages=10,
                                          MessageAttributeNames=['All'])
        # `range` replaces the Python-2-only `xrange`; semantics here are
        # identical and the test now runs under Python 3 as well.
        for index in range(6):
            assert messages[index].body == 'ScrapyItem'
            assert messages[index].message_attributes == \
                sqsfeedexport.translate_item_to_message(examples[index])['MessageAttributes']
        assert len(messages) == 6
def all_queues(self, verbose=True):
    """Generator yielding each backend queue implementation to test in turn:
    Dockets (queue + error queue), in-memory, and mocked SQS.

    :param verbose: when True, print which queue is being tested.
    """
    redis_client = StrictRedis(host=os.getenv("DEFERRABLE_TEST_REDIS_HOST", "redis"), db=15)
    factory = DocketsBackendFactory(redis_client, wait_time=0)
    backend = factory.create_backend_for_group('testing')
    if verbose:
        # print(...) with a single argument behaves identically on Python 2
        # and 3, unlike the original Python-2-only `print ...` statements.
        print("Testing Dockets Queue...")
    yield backend.queue
    if verbose:
        print("Testing Dockets Error Queue...")
    yield backend.error_queue
    backend = InMemoryBackendFactory().create_backend_for_group('testing')
    if verbose:
        print("Testing Memory Queue...")
    yield backend.queue
    fake_sqs = mock_sqs()
    fake_sqs.start()
    # The thunk defers the SQS connection until the backend first needs it.
    factory = SQSBackendFactory(lambda: SQSConnection(), wait_time=None)
    backend = factory.create_backend_for_group('testing')
    if verbose:
        print("Testing SQS Queue with lazy connection thunk...")
    yield backend.queue
    fake_sqs.stop()
def setUp(self):
    """Start the SQS mock and build an accumulator over a fresh test queue."""
    super(TestSQSAccumulator, self).setUp()
    self.mock = moto.mock_sqs()
    self.mock.start()
    connection = boto.connect_sqs()
    self.queue = connection.create_queue("test-queue")
    self.accumulator = accumulator.SQSAccumulator('test-queue')
def sqs_queue_by_name():
    """Create a mocked SQS queue and yield its name."""
    queue_name = "test-sqs"
    with mock_sqs():
        boto3.resource("sqs").create_queue(QueueName=queue_name)
        yield queue_name
def queue(aws_credentials):
    """Yield a mocked SQS queue, exporting its URL via the SQS_URL env var."""
    with mock_sqs():
        resource = boto3.resource("sqs", region_name="eu-west-1")
        mocked_queue = resource.create_queue(QueueName="uk-metoffice-nwp")
        os.environ["SQS_URL"] = mocked_queue.url
        yield mocked_queue
def boto3_sqs():
    """Create a mocked SQS queue, send one delayed message to it, and return
    the send_message response wrapped in Response."""
    # Dummy credentials so boto3 never contacts real AWS.
    for env_key in ('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY',
                    'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN'):
        os.environ[env_key] = 'testing'
    with mock_sqs():
        sqs = boto3.client('sqs', region_name='us-east-1')
        created = sqs.create_queue(QueueName='SQS_QUEUE_NAME',
                                   Attributes={
                                       'DelaySeconds': '60',
                                       'MessageRetentionPeriod': '600'
                                   })
        send_result = sqs.send_message(
            QueueUrl=created['QueueUrl'],
            DelaySeconds=10,
            MessageAttributes={
                'Website': {
                    'DataType': 'String',
                    'StringValue': 'https://www.instana.com'
                },
            },
            MessageBody=('Monitor any application, service, or request '
                         'with Instana Application Performance Monitoring'))
        return Response(send_result)
def _handle_sqs_queue(resources):
    """Build (before, after) hooks that mock SQS and create/delete the
    queues declared in *resources*."""
    from moto import mock_sqs
    sqs = mock_sqs()

    def before():
        # Start the mock, then create every queue named in the definitions.
        sqs.start()
        sqs_resource = boto3.resource("sqs")
        for definition in resources:
            sqs_resource.create_queue(
                QueueName=definition["Properties"]["QueueName"]
            )

    def after():
        # Delete each queue (looked up by name), then stop the mock.
        client = boto3.client("sqs")
        for definition in resources:
            name = definition["Properties"]["QueueName"]
            url = client.get_queue_url(QueueName=name)["QueueUrl"]
            client.delete_queue(QueueUrl=url)
        sqs.stop()

    return before, after
def setup(self):
    """Add a fake message to the queue."""
    # Activate the SQS mock for the duration of the test.
    self.mock_sqs = mock_sqs()
    self.mock_sqs.start()
    sqs = boto3.resource('sqs')
    config = load_config('tests/unit/conf/')
    prefix = config['global']['account']['prefix']
    # Queue name derives from the account prefix configured for unit tests.
    name = StreamAlertSQSClient.DEFAULT_QUEUE_NAME.format(prefix)
    self.queue = sqs.create_queue(QueueName=name)
    # Create a fake s3 notification message to send
    bucket = 'unit-testing.streamalerts'
    test_s3_notification = {
        'Records': [
            {
                'eventVersion': '2.0',
                'eventSource': 'aws:s3',
                'awsRegion': 'us-east-1',
                'eventTime': '2017-08-07T18:26:30.956Z',
                'eventName': 'S3:PutObject',
                'userIdentity': {
                    'principalId': 'AWS:AAAAAAAAAAAAAAA'
                },
                'requestParameters': {
                    'sourceIPAddress': '127.0.0.1'
                },
                'responseElements': {
                    'x-amz-request-id': 'FOO',
                    'x-amz-id-2': 'BAR'
                },
                's3': {
                    's3SchemaVersion': '1.0',
                    'configurationId': 'queue',
                    'bucket': {
                        'name': bucket,
                        'ownerIdentity': {
                            'principalId': 'AAAAAAAAAAAAAAA'
                        },
                        'arn': 'arn:aws:s3:::{}'.format(bucket)
                    },
                    'object': {
                        'key': ('alerts/dt=2017-08-2{}-14-02/rule_name_alerts-'
                                '1304134918401.json'.format(day)),
                        'size': 1494,
                        'eTag': '12214134141431431',
                        'versionId': 'asdfasdfasdf.dfadCJkj1',
                        'sequencer': '1212312321312321321'
                    }
                }
            # One record per day value (26th and 27th); set iteration order
            # is arbitrary but both records are always present.
            } for day in {6, 7}
        ]
    }
    self.queue.send_message(MessageBody=json.dumps(test_s3_notification))
    self.client = StreamAlertSQSClient(config)
def create_mock_sqs():
    """Create a mocked 'test' queue preloaded with two fake messages."""
    with mock_sqs():
        resource = boto3.resource("sqs", region_name="us-east-1")
        mocked_queue = resource.create_queue(
            QueueName="test", Attributes={"DelaySeconds": "0"}
        )
        for fake in (fake_message1, fake_message2):
            mocked_queue.send_message(MessageBody=json.dumps(fake))
        yield
def create_upload_queue(self, queue_name):
    """Method to create a test sqs for uploading tiles for the ingest

    Returns the queue URL.
    """
    if self.mock:
        # Bug fix: the original called mock_sqs(<url>), which ran the
        # creation first and then wrapped the returned URL in a moto
        # decorator object instead of returning it. Run the creation under
        # the mock context, matching the sibling create_queue() method.
        with mock_sqs():
            url = self._create_upload_queue(queue_name)
    else:
        url = self._create_upload_queue(queue_name)
        # Real AWS: give the new queue time to become available.
        time.sleep(30)
    return url
def create_flush_queue(self, queue_name):
    """Method to create a test sqs for flushing cubes

    Returns the queue URL.
    """
    if self.mock:
        # Bug fix: the original called mock_sqs(<url>), which ran the
        # creation first and then wrapped the returned URL in a moto
        # decorator object instead of returning it. Run the creation under
        # the mock context, matching the sibling create_queue() method.
        with mock_sqs():
            url = self._create_flush_queue(queue_name)
    else:
        url = self._create_flush_queue(queue_name)
        # Real AWS: give the new queue time to become available.
        time.sleep(10)
    return url
def sqs(aws_credentials):
    """Mocked SQS Fixture."""
    from consoleme.config import config

    with mock_sqs():
        client_kwargs = config.get("boto3.client_kwargs", {})
        yield boto3.client("sqs", region_name="us-east-1", **client_kwargs)
def start():
    """
    Entrypoint for mocking SQS.

    :return: the started moto mock, so callers can later call .stop() on it.
        (The original discarded the reference, making the mock impossible
        to stop; returning it is backward compatible since the previous
        return value, None, was necessarily ignored.)
    """
    # start SQS mocking with moto
    mock = mock_sqs()
    mock.start()
    return mock
def create_queue(self, queue_name):
    """Create a test SQS queue and return its URL.

    Runs under a moto mock when mocking is enabled; against real AWS it
    waits for the queue to become available.
    """
    if not self.mock:
        url = self._create_queue(queue_name)
        time.sleep(30)
        return url
    with mock_sqs():
        return self._create_queue(queue_name)
def setup(self, boto_patch):
    """Setup the AthenaRefresher tests"""
    self.mock_sqs = mock_sqs()
    self.mock_sqs.start()
    # Athena calls go through the patched boto client, not the network.
    boto_patch.client.return_value = MockAthenaClient()
    queue_name = StreamAlertSQSClient.DEFAULT_QUEUE_NAME.format('unit-testing')
    self.queue = boto3.resource('sqs').create_queue(QueueName=queue_name)
    self._refresher = AthenaRefresher()
def setUp(self):
    """Start the SQS mock, create the listener's queues, and build the listener."""
    self.boto_sqs_patcher = mock_sqs()
    self.MockClass = self.boto_sqs_patcher.start()
    mock_connection = boto.connect_sqs()
    for queue_name in ('bang-queue', 'bang-response'):
        mock_connection.create_queue(queue_name)
    # Force region lookups to hand back the mocked connection.
    boto.sqs.connect_to_region = MagicMock(name="mock_connect_to_sqs",
                                           return_value=mock_connection)
    self.sqslistener = SQSListener(listener_config_path=LISTENER_CONFIG_PATH)
def initialize_job_queue():
    """Create a mocked SQS job queue for tests.

    Yields (sqs_client, queue_name, region_name).
    """
    queue_name = "pytest_sqs_job_queue"
    region_name = "us-west-2"
    with mock_sqs():
        sqs_client = client("sqs", region_name=region_name)
        sqs_client.create_queue(QueueName=queue_name)
        yield sqs_client, queue_name, region_name
def sqs_queue(boto_session, mocked_sns, settings):
    """Yield a mocked SQS queue subscribed to the mocked SNS topic."""
    with mock_sqs():
        created = boto_session.resource('sqs').create_queue(QueueName='dummy-queue')
        mocked_sns.subscribe(
            TopicArn=settings.SNS_ARN,
            Protocol='sqs',
            Endpoint=created.attributes['QueueArn']
        )
        yield created
def sqs(aws_credentials):
    """Yield (client, queue_url) for a mocked SQS queue; stop the mock after."""
    sqs_mock = mock_sqs()
    sqs_mock.start()
    sqs_client = boto3.client('sqs', 'us-west-2')
    queue_url = sqs_client.create_queue(
        QueueName="test-resource-queue"
    )['QueueUrl']
    yield (sqs_client, queue_url)
    sqs_mock.stop()
def sqs(self, scope="session", autouse=True):
    """Yield (sqs_client, queue_url) for a mocked 'test-scan-queue'."""
    # NOTE(review): `scope` and `autouse` look like they were meant to be
    # @pytest.fixture(scope=..., autouse=...) decorator arguments, not
    # function parameters — as written they are accepted and silently
    # ignored. Confirm against the decorator at the definition site.
    mock = mock_sqs()
    mock.start()
    # There is currently a bug on moto, this line is needed as a workaround
    # Ref: https://github.com/spulec/moto/issues/1926
    boto3.setup_default_session()
    sqs_client = boto3.client('sqs', 'us-west-2')
    queue_name = "test-scan-queue"
    queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
    yield (sqs_client, queue_url)
    mock.stop()
def test_missing_response_queue(self):
    """SQSListener must raise MissingQueueException when its queue is absent."""
    boto_sqs_patcher = mock_sqs()
    mockClass = boto_sqs_patcher.start()
    try:
        mock_connection = boto.connect_sqs()
        # Only the response queue exists; the listener's own queue is missing.
        mock_connection.create_queue('response-queue')
        boto.sqs.connect_to_region = MagicMock(name="mock_connect_to_sqs",
                                               return_value=mock_connection)
        with self.assertRaises(MissingQueueException):
            self.sqslistener = SQSListener(listener_config_path=LISTENER_CONFIG_PATH)
    finally:
        # Stop the mock even when the assertion fails; the original leaked
        # the active patch into subsequent tests on failure.
        boto_sqs_patcher.stop()
def setUp(self):
    """Activate moto mocks for SNS and SQS before each test."""
    self.sns_mock = moto.mock_sns()
    self.sqs_mock = moto.mock_sqs()
    self.sns_mock.start()
    self.sqs_mock.start()
def sqs_client(self):
    """Yield a boto3 SQS client backed by a moto mock in eu-west-1."""
    with mock_sqs():
        yield boto3.client('sqs', region_name="eu-west-1")
def start_mocking(self):
    """Flag mocking as active and start the moto SQS mock."""
    self.mock = True
    sqs_mock = mock_sqs()
    sqs_mock.start()
    self.mock_sqs = sqs_mock
def delete_upload_queue(self, queue_name):
    """Method to delete a test sqs for uploading tiles for the ingest"""
    if self.mock:
        # Bug fix: the original called mock_sqs(<return value>), which ran
        # the delete first and then pointlessly wrapped its result in a moto
        # decorator. Run the delete under the mock context, matching the
        # sibling delete_queue() method.
        with mock_sqs():
            self._delete_upload_queue(queue_name)
    else:
        self._delete_upload_queue(queue_name)
def decorate(cls, func):
    """Wrap *func* with responses activation plus moto SNS and SQS mocks."""
    wrapped = responses.activate(func)
    wrapped = moto.mock_sns(wrapped)
    return moto.mock_sqs(wrapped)
def sqs(request):
    """Return a boto3 SQS resource under a moto mock; the mock is stopped
    via a pytest finalizer at teardown."""
    sqs_mock = moto.mock_sqs()
    sqs_mock.start()
    request.addfinalizer(sqs_mock.stop)
    return boto3.resource('sqs', region_name='us-east-1')