Example #1
def test_get_canonical_ids(self):
        accounts = Account.query.all()
        get_canonical_ids(accounts)

        for account in accounts:
            assert len(account.custom_fields) == 1
            assert account.custom_fields[0].name == "canonical_id"
            assert account.custom_fields[0].value == "bcaf1ffd86f41161ca5fb16fd081034f"  # Default from moto.

            # Make it something else to test overrides:
            account.custom_fields[0].value = "replaceme"
            db.session.add(account)

        db.session.commit()

        # Test without override (nothing should be changed):
        get_canonical_ids(accounts)
        for account in accounts:
            assert len(account.custom_fields) == 1
            assert account.custom_fields[0].name == "canonical_id"
            assert account.custom_fields[0].value == "replaceme"

        # Test override:
        get_canonical_ids(accounts, override=True)
        for account in accounts:
            assert len(account.custom_fields) == 1
            assert account.custom_fields[0].name == "canonical_id"
            assert account.custom_fields[0].value == "bcaf1ffd86f41161ca5fb16fd081034f"  # Default from moto.

        # Stop the moto mocks started during test setup.
        mock_sts().stop()
        mock_s3().stop()
Example #2
    def pre_test_setup(self):
        account_type_result = AccountType.query.filter(AccountType.name == 'AWS').first()
        if not account_type_result:
            account_type_result = AccountType(name='AWS')
            db.session.add(account_type_result)
            db.session.commit()

        self.account = Account(identifier="012345678910", name="testing",
                               account_type_id=account_type_result.id)
        self.technology = Technology(name="s3")
        self.item = Item(region="us-west-2", name="somebucket",
                         arn="arn:aws:s3:::somebucket", technology=self.technology,
                         account=self.account)

        db.session.add(self.account)
        db.session.add(self.technology)
        db.session.add(self.item)

        db.session.commit()

        mock_s3().start()
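        # Any boto3 client created while the mock is active talks to moto's in-memory backend.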
        client = boto3.client("s3")
        client.create_bucket(Bucket="somebucket")
        client.create_bucket(Bucket="someotherbucket")
        client.create_bucket(Bucket="someotherbucket2")
Example #3
def s3_resource(tips_file):
    pytest.importorskip('s3fs')
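    # Start moto's S3 mock up front so the boto3 calls below never leave the process.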
    moto.mock_s3().start()

    test_s3_files = [
        ('tips.csv', tips_file),
        ('tips.csv.gz', tips_file + '.gz'),
        ('tips.csv.bz2', tips_file + '.bz2'),
    ]

    def add_tips_files(bucket_name):
        for s3_key, file_name in test_s3_files:
            with open(file_name, 'rb') as f:
                conn.Bucket(bucket_name).put_object(
                    Key=s3_key,
                    Body=f)

    boto3 = pytest.importorskip('boto3')
    # see gh-16135
    bucket = 'pandas-test'

    conn = boto3.resource("s3", region_name="us-east-1")
    conn.create_bucket(Bucket=bucket)
    add_tips_files(bucket)

    conn.create_bucket(Bucket='cant_get_it', ACL='private')
    add_tips_files('cant_get_it')

    yield conn

    moto.mock_s3().stop()
Example #4
    def setUp(self):
        mock_s3().start()
        patchers = [
            "autopush.main.task",
            "autopush.main.reactor",
            "autopush.settings.TwistedMetrics",
        ]
        self.mocks = {}
        for name in patchers:
            patcher = patch(name)
            self.mocks[name] = patcher.start()
Example #5
    def setUp(self):
        self.s3 = mock_s3()
        self.s3.start()
        boto = connect_s3()
        boto.create_bucket(self._bucket)

        super(MPConnectionTest, self).setUp()
Example #6
    def setUp(self):
        self.mock = moto.mock_s3()
        self.mock.start()
        self.conn = boto.connect_s3()
        self.conn.create_bucket(TEST_BUCKET_NAME)
        pyramid = Pyramid(stride=8)
        grid_image = os.path.join(DATA_DIRECTORY, 'grid_crop', 'grid.png')

        metatile = MetaTile(MetaTileIndex(19, 453824, 212288, 8),
                            data=open(grid_image, 'rb').read(),
                            mimetype='image/png')
        format = FormatBundle(MapType('image'), TileFormat('PNG'))

        storage = S3MetaTileStorage(levels=pyramid.levels,
                                    stride=pyramid.stride,
                                    bucket=TEST_BUCKET_NAME,
                                    prefix='testlayer',
                                    format=format)
        storage.put(metatile)

        self.node = S3StorageNode('s3', maptype='image',
                                  tileformat=dict(format='PNG'),
                                  levels=pyramid.levels,
                                  stride=pyramid.stride,
                                  bucket=TEST_BUCKET_NAME,
                                  prefix='testlayer')
        self.expected = grid_image
Example #7
    def setUp(self):
        super(TestCloudFrontS3Storage, self).setUp()
        self.s3_mock = mock_s3()
        self.s3_mock.start()
        self.settings = {
            'storage.bucket': 'mybucket',
            'storage.access_key': 'abc',
            'storage.secret_key': 'bcd',
            'storage.cloud_front_domain': 'https://abcdef.cloudfront.net',
            'storage.cloud_front_key_file': '',
            'storage.cloud_front_key_string': '-----BEGIN RSA PRIVATE KEY-----\n'
                                              'MIICXQIBAAKBgQDNBN3WHzIgmOEkBVNdBlTR7iGYyUXDVuFRkJlYp/n1/EZf2YtE\n'
                                              'BpxJAgqdwej8beWV16QXOnKXQpsGAeu7x2pvOGFyRGytmLDeUXayfIF/E46w83V2\n'
                                              'r53NOBrlezagqCAz9uafocyNaXlxZfp4tx82sEmpSmHGwd//+n6zgXNC0QIDAQAB\n'
                                              'AoGAd5EIA1GMPYCLhSNp+82ueARGKcHwYrzviU8ob5D/cVtge5P26YRlbxq2sEyf\n'
                                              'oWBCTgJGW5mlrNuWZ4mFPq1NP2X2IU80k/J67KOuOelAykIVQw6q6GAjtmh40x4N\n'
                                              'EekoFzxVqoFKqWOJ1UNP0jNOyfzxU5dfzvw5GOEXob9usjECQQD3++wWCoq+YRCz\n'
                                              '8qqav2M7leoAnDwmCYKpnugDU0NR61sZADS3kJHnhXAbPFQI4dRfETJOkKE/iDph\n'
                                              'G0Rtdfm1AkEA06VoI49wjEMYs4ah3qwpvhuVyxVa9iozIEoDYiVCOOBZw8rX79G4\n'
                                              '+5yzC9ehy9ugWttSA2jigNXVB6ORN3+mLQJBAM47lZizBbXUdZahvp5ZgoZgY65E\n'
                                              'QIWFrUOxYtS5Hyh2qlk9YZozwhOgVp5f6qdEYGD7pTHPeDqk6aAulBbQYW0CQQC4\n'
                                              'hAw2dGd64UQ3v7h/mTkLNKFzXDrhQgkwrVYlyrXhQDcCK2X2/rB3LDYsrOGyCNfU\n'
                                              'XkEyF87g44vGDSQdbnxBAkA1Y+lB/pqdyHMv5RFabkBvU0yQDfekAKHeQ6rS+21g\n'
                                              'dWedUVc1JNnKtb8W/rMfdjg9YLYqUTvoBvp0DjfwdYc4\n'
                                              '-----END RSA PRIVATE KEY-----',
            'storage.cloud_front_key_id': 'key-id'
        }
        conn = boto.connect_s3()
        self.bucket = conn.create_bucket('mybucket')
        patch.object(CloudFrontS3Storage, 'test', True).start()
        kwargs = CloudFrontS3Storage.configure(self.settings)
        self.storage = CloudFrontS3Storage(MagicMock(), **kwargs)
Example #8
    def test_unpublish_cmd(self):
        with mock_s3():
            conn = boto.connect_s3()
            bucket = conn.create_bucket(settings.AWS_BUCKET_NAME)
            call_command("build")
            call_command("unpublish", no_pooling=True, verbosity=3)
            self.assertFalse(list(key for key in bucket.list()))
Example #9
def mock_aws_services():
    mock = moto.mock_s3()
    mock.start()

    yield
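    # Everything below the yield runs as teardown when this is used as a pytest fixture.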

    mock.stop()
Example #10
    def test_S3_communication(self):
        'test downloading/uploading from/to S3'
        from utils import s3_upload_file
        from utils import s3_download_file
        # Make sure the backup file still exists
        current_dir = os.path.dirname(os.path.realpath(__file__))
        backup_file = '%s/adsabs_consul_kv.2015-10-21.json' % current_dir
        backup_copy = '%s/test_backup.json' % current_dir
        # make a copy to test
        shutil.copyfile(backup_file, backup_copy)
        self.assertTrue(os.path.exists(backup_file))
        with mock_s3():
            # Create the mocked S3 session object
            s3 = boto3.resource('s3')
            # See to it that the expected S3 bucket exists
            s3.create_bucket(Bucket=S3_bucket)
            # Upload the backup file to the mocked S3
            s3_upload_file(s3, backup_copy, S3_bucket)
            # Is the file in the bucket
            bucket_contents = [o.key for o in s3.Bucket(S3_bucket).objects.all()]
            # Is it what we expect?
            expected_contents = [os.path.basename(backup_copy)]
            self.assertEqual(bucket_contents, expected_contents)
            # Now check if we can download the file
            os.remove(backup_copy)
            # It really is no longer there
            self.assertFalse(os.path.exists(backup_copy))
            # Download the file from mocked S3
            s3_download_file(s3, backup_copy, S3_bucket)
            # The file should be back
            self.assertTrue(os.path.exists(backup_copy))
            # and be the same as the original
            self.assertTrue(filecmp.cmp(backup_file, backup_copy, shallow=False))
            # Finally, remove the copy
            os.remove(backup_copy)
Example #11
    def setUp(self):
        mock_s3 = moto.mock_s3()
        mock_s3.start()
        self.addCleanup(mock_s3.stop)
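        # addCleanup guarantees the mock is stopped even if the test fails midway.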

        self.s3 = boto.connect_s3()
        self.s3.create_bucket('test_s3_bucket')
Example #12
    def test_cache_control(self):
        if sys.version_info[:2] != (3, 4):
            from moto import mock_s3
            with mock_s3():
                # Set random max-age for various content types
                with self.settings(BAKERY_CACHE_CONTROL={
                    "application/javascript": random.randint(0, 100000),
                    "text/css": random.randint(0, 100000),
                    "text/html": random.randint(0, 100000),
                }):
                    conn = boto.connect_s3()
                    bucket = conn.create_bucket(settings.AWS_BUCKET_NAME)
                    call_command("build")
                    call_command("publish", no_pooling=True, verbosity=3)
                    for key in bucket:
                        key = bucket.get_key(key.name)
                        if key.content_type in settings.BAKERY_CACHE_CONTROL:
                            # key.cache_control returns string
                            # with "max-age=" prefix
                            self.assertIn(
                                str(settings.BAKERY_CACHE_CONTROL.get(
                                    key.content_type)),
                                key.cache_control
                            )
        else:
            self.skipTest("Moto doesn't work in Python 3.4")
Example #13
    def mock_s3_resource(self):
        mock = mock_s3()
        mock.start()

        yield mock

        mock.stop()
Example #14
    def setUp(self):
        self.mock = mock_s3()
        self.mock.start()

        #
        # Populate the data in mock S3
        #
        # s3+file first
        conn = boto.connect_s3()
        b = conn.create_bucket(self.bucket_name)
        k = Key(b)
        k.name = self.key_name
        with open(test_file(self.key_name), 'rb') as f:
            k.set_contents_from_file(f)

        # s3+dir
        b = conn.create_bucket(self.dir_bucket_name)
        for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
            k = Key(b)
            k.name = posixpath.join(self.dir_list_name, fname)
            with open(test_file(posixpath.join('delta_dir_source', fname)),
                      'rb') as f:
                k.set_contents_from_file(f)

        # initialize the internal list data structure via the normal method
        super(S3SourceListsTest, self).setUp()
Example #15
    def test_gzip_and_send_s3(self):
        """
        Tests that a gzip is made and sent to S3 and everything cleaned after
        """
        # First create some dummy content to work with

        output_path = '{0}/test_out/'.format(os.getcwd())
        helper_extract_all(cluster=self.cluster, output_path=output_path)

        with mock_s3():
            s3_resource = boto3.resource('s3')
            s3_resource.create_bucket(Bucket=self.s3_details['bucket'])

            # Run the gzip and send
            dashboard.push_to_s3(
                input_directory=output_path,
                s3_details=self.s3_details
            )

            # Check there is a gzip in the bucket
            s3_object = s3_resource.Object(
                self.s3_details['bucket'],
                'dashboard.tar.gz'
            )

            keys = s3_object.get().keys()
            self.assertTrue(
                len(keys) > 0
            )

            # Clean up files
            shutil.rmtree(output_path)
Example #16
    def setUp(self):
        self.mock_s3 = moto.mock_s3()
        self.mock_s3.start()
        self.s3_conn = boto.connect_s3()
        self.s3_conn.create_bucket('last_bucket')
        bucket = self.s3_conn.get_bucket('last_bucket')
        key = bucket.new_key('test_list/LAST')
        self.pointers = ['pointer1', 'pointer2', 'pointer3', '']
        key.set_contents_from_string('\r\n'.join(self.pointers))
        key.close()

        for key_name in POINTER_KEYS:
            key = bucket.new_key(key_name)
            out = StringIO.StringIO()
            with gzip.GzipFile(fileobj=out, mode='w') as f:
                f.write(json.dumps({'name': key_name}))
            key.set_contents_from_string(out.getvalue())
            key.close()

        self.options_prefix_pointer = {
            'bucket': 'last_bucket',
            'aws_access_key_id': 'KEY',
            'aws_secret_access_key': 'SECRET',
            'prefix_pointer': 'test_list/LAST'
        }
Example #17
    def setUp(self):
        f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
        self.tempFilePath = f.name
        f.write(b"I'm a temporary file for testing\n")
        f.close()
        self.mock_s3 = mock_s3()
        self.mock_s3.start()
Example #18
    def pre_test_setup(self):
        self.account_type = AccountType(name='AWS')
        db.session.add(self.account_type)
        db.session.commit()

        for x in range(0, 9):
            db.session.add(Account(name="account{}".format(x), account_type_id=self.account_type.id,
                                   identifier="01234567891{}".format(x), active=True))

        db.session.commit()

        mock_sts().start()
        mock_s3().start()

        self.s3_client = boto3.client("s3")
        self.s3_client.create_bucket(Bucket="testBucket")
Example #19
    def test_cache_control(self):
        s3 = boto3.resource('s3')
        with mock_s3():
            # Set random max-age for various content types
            with self.settings(BAKERY_CACHE_CONTROL={
                "application/javascript": random.randint(0, 100000),
                "text/css": random.randint(0, 100000),
                "text/html": random.randint(0, 100000),
            }):
                self._create_bucket()
                call_command("build")
                call_command("publish", verbosity=3)

                for obj in self._get_bucket_objects():
                    s3_obj = s3.Object(
                        settings.AWS_BUCKET_NAME, obj.get('Key'))

                    if s3_obj.content_type in settings.BAKERY_CACHE_CONTROL:
                        # key.cache_control returns string
                        # with "max-age=" prefix
                        self.assertIn(
                            str(settings.BAKERY_CACHE_CONTROL.get(
                                s3_obj.content_type)),
                            s3_obj.cache_control
                        )
Example #20
    def test_publish_cmd(self):
        if sys.version_info[:2] != (3, 4):
            from moto import mock_s3
            with mock_s3():
                conn = boto.connect_s3()
                bucket = conn.create_bucket(settings.AWS_BUCKET_NAME)
                call_command("build")
                call_command("publish", no_pooling=True, verbosity=3)
                local_file_list = []
                for (dirpath, dirnames, filenames) in os.walk(
                        settings.BUILD_DIR):
                    for fname in filenames:
                        local_key = os.path.join(
                            os.path.relpath(dirpath, settings.BUILD_DIR),
                            fname
                        )
                        if local_key.startswith('./'):
                            local_key = local_key[2:]
                        local_file_list.append(local_key)
                for key in bucket.list():
                    self.assertIn(key.name, local_file_list)
                call_command("unbuild")
                os.makedirs(settings.BUILD_DIR)
                call_command("publish", no_pooling=True, verbosity=3)
        else:
            self.skipTest("Moto doesn't work in Python 3.4")
Example #21
def s3():
    # writable local S3 system
    m = moto.mock_s3()
    m.start()
    import boto3
    client = boto3.client('s3')
    client.create_bucket(Bucket=test_bucket_name, ACL='public-read')
    for k in [a, b, c, d]:
        try:
            client.delete_object(Bucket=test_bucket_name, Key=k)
        except Exception:
            pass
    for flist in [files, csv_files, text_files]:
        for f, data in flist.items():
            client.put_object(Bucket=test_bucket_name, Key=f, Body=data)
    yield S3FileSystem(anon=False)
    for flist in [files, csv_files, text_files]:
        for f, data in flist.items():
            try:
                client.delete_object(Bucket=test_bucket_name, Key=f)
            except Exception:
                pass
    for k in [a, b, c, d]:
        try:
            client.delete_object(Bucket=test_bucket_name, Key=k)
        except Exception:
            pass
    m.stop()
Example #22
    def setUp(self):
        mock_s3 = moto.mock_s3()
        mock_s3.start()
        self.addCleanup(mock_s3.stop)

        self.s3 = boto3.client('s3')
        self.s3.create_bucket(Bucket='test_s3_bucket')
Example #23
def s3_bucket():
    """S3 mock service"""
    mock = mock_s3()
    mock.start()
    s3_client = session.client('s3')
    s3_client.create_bucket(Bucket='test_bucket_1')
    yield s3_client.list_buckets()
    mock.stop()
Example #24
    def setUp(self):
        self.mock_s3 = mock_s3()
        self.mock_s3.start()

        conn = boto.connect_s3()
        conn.create_bucket('test')

        self.ns = S3NodeStorage(bucket_name='test')
Example #25
    def setUp(self):
        self.mock = moto.mock_s3()
        self.mock.start()

        s3 = boto3.resource('s3')
        s3.Bucket(TEST_BUCKET_NAME).create()

        self.storage = S3HttpStorage(bucket=TEST_BUCKET_NAME)
Example #26
def s3():
    # writable local S3 system
    with moto.mock_s3() as m:
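        # Entering the mock as a context manager means it is stopped automatically on exit.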
        client = boto3.client('s3')
        client.create_bucket(Bucket=test_bucket_name, ACL='public-read-write')
        for f, data in files.items():
            client.put_object(Bucket=test_bucket_name, Key=f, Body=data)
        yield S3FileSystem(anon=True)
Example #27
    def test_fetch_aws_canonical_ids_command(self):
        accounts = Account.query.all()
        fetch_aws_canonical_ids(False)

        for account in accounts:
            assert len(account.custom_fields) == 1
            assert account.custom_fields[0].name == "canonical_id"
            assert account.custom_fields[0].value == "bcaf1ffd86f41161ca5fb16fd081034f"  # Default from moto.

            # Make it something else to test overrides:
            account.custom_fields[0].value = "replaceme"
            db.session.add(account)

        db.session.commit()

        # Test without override (nothing should be changed):
        fetch_aws_canonical_ids(False)
        for account in accounts:
            assert len(account.custom_fields) == 1
            assert account.custom_fields[0].name == "canonical_id"
            assert account.custom_fields[0].value == "replaceme"

        # Test override:
        fetch_aws_canonical_ids(True)
        for account in accounts:
            assert len(account.custom_fields) == 1
            assert account.custom_fields[0].name == "canonical_id"
            assert account.custom_fields[0].value == "bcaf1ffd86f41161ca5fb16fd081034f"  # Default from moto.

        # Create an inactive account:
        inactive = Account(name="inactive", account_type_id=self.account_type.id,
                           identifier="109876543210")
        db.session.add(inactive)
        db.session.commit()

        # Run the change again:
        fetch_aws_canonical_ids(True)

        # Ensure that nothing happened to the inactive account:
        assert len(inactive.custom_fields) == 0

        # Also verify that no exceptions were encountered:
        assert len(ExceptionLogs.query.all()) == 0

        mock_sts().stop()
        mock_s3().stop()
Example #28
def my_mock_s3(request):
    """The purpose of this fixture is to setUp/tearDown the moto library"""
    m = mock_s3()
    m.start()

    def fin():
        m.stop()
    request.addfinalizer(fin)
Example #29
    def setUp(self):
        self.mock = moto.mock_s3()
        self.mock.start()
        conn = boto.connect_s3()
        self.bucket = conn.create_bucket('bucket')
        self.store = keepaneyeon.s3.S3Storage(
                access_key='access', secret_access_key='secret',
                path='s3://bucket/path/to')
Example #30
    def test_watcher_exceptions(self):
        """
        Tests that if exceptions are encountered, the watcher continues.

        Unfortunately -- moto lacks all of the S3 methods that we need. So this is just a
        test to ensure that exception handling works OK.
        :return:
        """
        mock_sts().start()

        s3_watcher = S3(accounts=[self.account.name])
        s3_watcher.slurp()

        assert len(ExceptionLogs.query.all()) == 3  # We created 3 buckets

        mock_s3().stop()
        mock_sts().stop()
Example #31
    def test_no_data(self):
        with mock_s3():
            source_url = "s3+file://{0}/{1}".format(self.bucket_name,
                                                    self.key_name)

            # No bucket
            f = S3FileSource(source_url, 0.5)
            with self.assertRaises(NoDataError) as ecm:
                f.load()
            self.assertEqual(
                str(ecm.exception),
                'Could not find bucket "{0}": S3ResponseError: '
                '404 Not Found\n'.format(self.bucket_name))

            # Empty bucket
            boto.connect_s3().create_bucket(self.bucket_name)
            f = S3FileSource(source_url, 0.5)
            with self.assertRaises(NoDataError) as ecm:
                f.load()
            self.assertEqual(str(ecm.exception),
                             'No chunk file found at "{0}"'.format(source_url))
Example #32
    def test_class(self):
        with moto.mock_s3():
            whitelist = {"s3": {"bucket": ["test"]}}
            settings = {
                "general": {
                    "dry_run": False
                },
                "services": {
                    "s3": {
                        "buckets": {
                            "clean": True,
                            "ttl": -1
                        }
                    }
                },
            }
            execution_log = {"AWS": {}}

            test_class = s3_cleanup.S3Cleanup(logging, whitelist, settings,
                                              execution_log)
            yield test_class
Example #33
    def test_s3_save(self):
        with mock_s3():
            import boto3
            client = boto3.client('s3')
            client.create_bucket(Bucket='fake-matrix-bucket',
                                 ACL='public-read-write')

            matrix_store_list = self.matrix_store()

            for matrix_store in matrix_store_list:
                if isinstance(matrix_store, CSVMatrixStore):
                    matrix_store.save(project_path='s3://fake-matrix-bucket',
                                      name='test')
                    # CSV
                    csv = CSVMatrixStore(
                        matrix_path='s3://fake-matrix-bucket/test.csv',
                        metadata_path='s3://fake-matrix-bucket/test.yaml')

                    assert csv.metadata == matrix_store_list[0].metadata
                    assert csv.matrix.to_dict(
                    ) == matrix_store_list[0].matrix.to_dict()
Example #34
def s3_client():
    mock_s3 = moto.mock_s3()
    mock_s3.start()

    client = boto3.client('s3')
    client.create_bucket(Bucket=test_bucket_name)
    client.put_object(Bucket=test_bucket_name,
                      Key=test_file_path,
                      Body=test_nb_content)
    client.put_object(Bucket=test_bucket_name,
                      Key=test_empty_file_path,
                      Body='')
    yield S3()
    try:
        client.delete_object(Bucket=test_bucket_name, Key=test_file_path)
        client.delete_object(Bucket=test_bucket_name,
                             Key=test_file_path + '.txt')
        client.delete_object(Bucket=test_bucket_name, Key=test_empty_file_path)
    except Exception:
        pass
    mock_s3.stop()
Example #35
    def test_run(self):
        with mock_s3():
            client = S3Client()
            client.s3.meta.client.create_bucket(Bucket=BUCKET)
            for key in FILES:
                k = '%s/%s' % (KEY, key)
                client.put_string('', 's3://%s/%s' % (BUCKET, k))
            folder_path = 's3://%s/%s' % (BUCKET, KEY)
            path = 's3://%s/%s/%s' % (BUCKET, 'manifest', 'test.manifest')
            folder_paths = [folder_path]

            m = mock.mock_open()
            with mock.patch('luigi.contrib.s3.S3Target.open', m, create=True):
                t = redshift.RedshiftManifestTask(path, folder_paths)
                luigi.build([t], local_scheduler=True)

            expected_manifest_output = json.dumps(
                generate_manifest_json(folder_paths, FILES))

            handle = m()
            handle.write.assert_called_with(expected_manifest_output)
Example #36
def test_aws_keys_can_be_none():
    """
    Verify that the os.environ[KEY] can be None
    Patching the None-value shouldn't be an issue
    """
    original = os.environ.get(KEY, "value-set-by-user")
    # Delete the original value by the user
    try:
        del os.environ[KEY]
    except KeyError:
        pass  # Value might not be set on this system in the first place
    try:
        # Verify that the os.environ[KEY] is patched
        with mock_s3():
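            # moto patches the AWS credential environment variables for the duration of the mock.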
            patched_value = os.environ[KEY]
            patched_value.should.equal("foobar_key")
        # Verify that the os.environ[KEY] is unpatched, and reverts to None
        assert os.environ.get(KEY) is None
    finally:
        # Reset the original value - we don't want to change the user's system
        os.environ[KEY] = original
Example #37
@pytest.fixture(scope="session", autouse=True)
def handler():
    """ Setup the handler to be used in the tests """
    os.environ["BASEDIR"] = "D:\\Personnal\\repos\\moviepickme\\tmp\\"
    os.environ["IMDB_DATASET_BASE_URL"] = "https://datasets.imdbws.com/"
    os.environ["STORAGE_BUCKET"] = "mybucket"
    os.environ["IMDB_DATASET_FILES"] = "title.ratings.tsv.gz"

    s3_mock = mock_s3()
    s3_mock.start()

    sts_mock = mock_sts()
    sts_mock.start()

    stepfunctions_mock = mock_stepfunctions()
    stepfunctions_mock.start()

    yield imdb_dataset_handler.ImdbDatasetHandler()

    s3_mock.stop()
    sts_mock.stop()
    stepfunctions_mock.stop()
Example #38
def test_s3_io(monkeypatch, without_aws_env):
    import moto
    from numpy import s_

    url = "s3://bucket/file.txt"
    bucket, _ = s3_url_parse(url)
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "fake-key-id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "fake-secret")

    with moto.mock_s3():
        s3 = s3_client(region_name='kk')
        s3.create_bucket(Bucket=bucket)
        assert s3_dump(b"33", url, s3=s3) is True
        assert s3_fetch(url, s3=s3) == b"33"

        assert s3_dump(b"0123456789ABCDEF", url, s3=s3) is True
        assert s3_fetch(url, range=s_[:4], s3=s3) == b"0123"
        assert s3_fetch(url, range=s_[3:8], s3=s3) == b"34567"

        with pytest.raises(ValueError):
            s3_fetch(url, range=s_[::2], s3=s3)
Example #39
def s3():
    with ensure_safe_environment_variables():
        # temporary workaround as moto fails for botocore >= 1.11 otherwise,
        # see https://github.com/spulec/moto/issues/1924 & 1952
        os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
        os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")

        # writable local S3 system
        import moto

        with moto.mock_s3():
            client = boto3.client("s3")
            client.create_bucket(Bucket=test_bucket_name, ACL="public-read-write")
            for f, data in files.items():
                client.put_object(Bucket=test_bucket_name, Key=f, Body=data)
            fs = s3fs.S3FileSystem(anon=True)
            fs.invalidate_cache()
            yield fs

            httpretty.HTTPretty.disable()
            httpretty.HTTPretty.reset()
Example #40
    def test_class(self):
        with moto.mock_s3():
            whitelist = {}
            settings = {
                "general": {
                    "dry_run": False
                },
                "services": {
                    "s3": {
                        "buckets": {
                            "clean": True,
                            "ttl": 5000
                        }
                    }
                },
            }
            resource_tree = {"AWS": {}}

            test_class = s3_cleanup.S3Cleanup(logging, whitelist, settings,
                                              resource_tree)
            yield test_class
Example #41
    def test_publish_cmd(self):
        with mock_s3():
            self._create_bucket()
            call_command("build")
            call_command("publish", verbosity=3)
            local_file_list = []
            for (dirpath, dirnames, filenames) in os.walk(settings.BUILD_DIR):
                for fname in filenames:
                    local_key = os.path.join(
                        os.path.relpath(dirpath, settings.BUILD_DIR), fname)
                    if local_key.startswith('./'):
                        local_key = local_key[2:]
                    local_file_list.append(local_key)

            for obj in self._get_bucket_objects():
                self.assertIn(obj.get('Key'), local_file_list)
            call_command("unbuild")
            os.makedirs(settings.BUILD_DIR)
            call_command("publish", verbosity=3)
            call_command("publish", no_delete=True, force=True)
            call_command("publish", aws_bucket_prefix='my-branch')
Example #42
    def test_uri(self):
        calc_db = TestToDb(
            host_uri="mongodb://localhost:27017",
            database="test_db_name",
            collection="test_collection",
        )
        calc_db.collection.insert_one({"task_id": "mp-1", "data": "12345"})
        self.assertEqual(calc_db.collection.find_one()["data"], "12345")

        with mock_s3():
            conn = boto3.client("s3")
            conn.create_bucket(Bucket="test_bucket")
            uri_db = TestToDb.from_db_file(db_dir + "/db_aws_uri.json")
            store = uri_db.get_store("test")
            self.assertEqual(store.sub_dir, "atomate_test/")

            store.connect()
            store.update([{"fs_id": "mp-1", "data": "111111111110111111"}])
            res = store.query_one({"fs_id": "mp-1"})
            self.assertEqual(res["fs_id"], "mp-1")
            self.assertEqual(res["data"], "111111111110111111")
Example #43
    def setup(self):
        if not mock_s3 or not boto3:
            raise SkipTest("boto3 and moto required for S3 tests")

        TileCacheTestBase.setup(self)

        self.mock = mock_s3()
        self.mock.start()

        self.bucket_name = "test"
        dir_name = 'mapproxy'

        boto3.client("s3").create_bucket(Bucket=self.bucket_name)

        self.cache = S3Cache(
            dir_name,
            file_ext='png',
            directory_layout='tms',
            bucket_name=self.bucket_name,
            profile_name=None,
        )
Example #44
    def setUp(self):
        """Set environment variables and setup the mocks."""
        os.environ['BATCH_LAMBDA_NAME'] = 'test_batch_lambda_name'
        os.environ['BATCH_LAMBDA_QUALIFIER'] = 'Production'
        os.environ['OBJECTS_PER_MESSAGE'] = '2'
        os.environ['S3_BUCKET_NAME'] = 'test_s3_bucket'
        os.environ['SQS_QUEUE_URL'] = 'https://sqs.us-east-1.amazonaws.com/1234/test_queue'

        self._mocks = [moto.mock_cloudwatch(), moto.mock_lambda(), moto.mock_s3(), moto.mock_sqs()]
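        # Start every service mock before any boto3 client or resource is created.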
        for mock in self._mocks:
            mock.start()

        # Import batch lambda handler _after_ the mocks have been initialized.
        from lambda_functions.batcher import main
        self.batcher_main = main

        self._bucket = boto3.resource('s3').Bucket(os.environ['S3_BUCKET_NAME'])
        self._bucket.create()

        response = boto3.client('sqs').create_queue(QueueName='test_queue')
        self._queue = boto3.resource('sqs').Queue(response['QueueUrl'])
Example #45
    def test_upload_to_s3(self):
        with moto.mock_s3():
            s3_conn = boto3.client('s3')
            s3_conn.create_bucket(Bucket='upload_bucket', ACL='public-read')
            client_kwargs = {
                'provider_access_key_id': 'test_fake_id',
                'provider_secret_access_key': 'test_fake_secret'
            }
            zarr_path = os.path.join(
                os.path.dirname(__file__),
                '../../examples/serve/demo/cube-1-250-250.zarr')
            ds1 = xr.open_zarr(zarr_path)
            write_cube(
                ds1,
                'https://s3.amazonaws.com/upload_bucket/cube-1-250-250.zarr',
                'zarr',
                client_kwargs=client_kwargs)
            self.assertIn(
                'cube-1-250-250.zarr/.zattrs',
                s3_conn.list_objects(
                    Bucket='upload_bucket')['Contents'][0]['Key'])
Example #46
    @pytest.fixture(scope="session", autouse=True)
    def s3(self):
        mock = mock_s3()
        mock.start()
        # There is currently a bug on moto, this line is needed as a workaround
        # Ref: https://github.com/spulec/moto/issues/1926
        boto3.setup_default_session()

        target = "infosec.mozilla.org"
        s3_client = boto3.client('s3', 'us-west-2')
        test_bucket_name = "test-results"
        test_bucket = s3_client.create_bucket(Bucket=test_bucket_name)
        # Add objects to the mocked bucket
        s3_client.put_object(Bucket=test_bucket_name,
                             Body=b'XYZ',
                             Key='{}_httpobservatory.json'.format(target))
        s3_client.put_object(Bucket=test_bucket_name,
                             Body=b'XYZ',
                             Key='{}_tlsobservatory.json'.format(target))

        yield (target, s3_client, test_bucket, test_bucket_name)
        mock.stop()
Example #47
def s3store():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index")
        store = S3Store(index, "bucket1")
        store.connect()

        store.update([{
            "task_id": "mp-1",
            "data": "asd",
            store.last_updated_field: datetime.utcnow(),
        }])
        store.update([{
            "task_id": "mp-3",
            "data": "sdf",
            store.last_updated_field: datetime.utcnow(),
        }])

        yield store
Example #48
    def setUpMock(self):
        """
        Let Moto take over all socket communications
        """
        self.ec2mock = mock_ec2()
        self.ec2mock.start()
        self.s3mock = mock_s3()
        self.s3mock.start()
        HTTPretty.register_uri(HTTPretty.GET,
                               self.AWS_INSTANCE_DATA_DEFAULT_URL,
                               body=u"""
[
  {
    "family": "General Purpose",
    "enhanced_networking": false,
    "vCPU": 1,
    "generation": "current",
    "ebs_iops": 0,
    "network_performance": "Low",
    "ebs_throughput": 0,
    "vpc": {
      "ips_per_eni": 2,
      "max_enis": 2
    },
    "arch": [
      "x86_64"
    ],
    "linux_virtualization_types": [
        "HVM"
    ],
    "ebs_optimized": false,
    "storage": null,
    "max_bandwidth": 0,
    "instance_type": "t2.nano",
    "ECU": "variable",
    "memory": 0.5,
    "ebs_max_bandwidth": 0
  }
]
""")
Example #49
def test_lambda_function():
    event = {
        "JobId": "12345678901234567890",
        "JobName": "DiscoveringHotTopicsUsingMachineLearning-1595090521262",
        "JobStatus": "IN_PROGRESS",
        "SubmitTime": "2020-07-18T16:42:02.483Z",
        "InputDataConfig": {
            "S3Uri": "s3://ingestio-tqsdiegtrmkp",
            "InputFormat": "ONE_DOC_PER_LINE"
        },
        "OutputDataConfig": {
            "S3Uri": "s3://2305002e803b60a8443cb7dd30/output/output.tar.gz"
        },
        "NumberOfTopics": 10,
        "DataAccessRoleArn": "arn:aws:iam::someaccountid:role/testrolename"
    }

    with mock_s3():
        s3 = boto3.client('s3', region_name=os.environ.get('AWS_REGION'))
        with open(
                os.path.join(os.path.dirname(__file__), 'fixtures',
                             'output.tar.gz'), 'rb') as f:
            body = f.read()
        s3.create_bucket(Bucket='testbucket')
        s3.create_bucket(Bucket='inferencebucket')
        s3.put_object(
            Bucket='inferencebucket',
            Key='testaccount-TOPICS-some1233556sagsdfa/output/output.tar.gz',
            Body=body)
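    # Note that the mock (and the objects uploaded above) ends when the with-block exits.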
    from wf_publish_topic_model.lambda_function import handler
    from botocore.stub import Stubber
    event_bridge_client = boto3.client('events', os.environ['AWS_REGION'])
    stubber = Stubber(event_bridge_client)
    stubber.add_response('put_events', {
        'FailedEntryCount': 0,
        'Entries': [{
            'EventId': '12456663423'
        }]
    })
    stubber.activate()
Example #50
def simulate_storage(storage_type, root=None):
    if storage_type == "fs":
        if root is None:
            with tempfile.TemporaryDirectory() as d:
                yield "file://" + d
        else:
            yield "file://" + root
    elif storage_type == "s3":
        import uuid

        from moto import mock_s3

        from ray.tests.mock_s3_server import start_service, stop_process

        @contextmanager
        def aws_credentials():
            # Copy os.environ so the original values can be restored afterwards.
            old_env = os.environ.copy()
            os.environ["AWS_ACCESS_KEY_ID"] = "testing"
            os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
            os.environ["AWS_SECURITY_TOKEN"] = "testing"
            os.environ["AWS_SESSION_TOKEN"] = "testing"
            yield
            os.environ.clear()
            os.environ.update(old_env)

        @contextmanager
        def moto_s3_server():
            host = "localhost"
            port = 5002
            url = f"http://{host}:{port}"
            process = start_service("s3", host, port)
            yield url
            stop_process(process)

        if root is None:
            root = uuid.uuid4().hex
        with moto_s3_server() as s3_server, aws_credentials(), mock_s3():
            url = f"s3://{root}?region=us-west-2&endpoint_override={s3_server}"
            yield url
    else:
        raise ValueError(f"Unknown storage type: {storage_type}")
Example #51
    def test_no_data(self):
        source_url = "s3+dir://tarantula/bigandblue/"
        index_url = posixpath.join(source_url, 'index.json')

        index_body = """{
                            "name": "tarantula",
                            "chunks": {
                                "1": {
                                    "path": "1"
                                }
                            }
                        }"""

        with mock_s3():
            # No bucket
            d = S3DirectorySource(source_url, 1)
            with self.assertRaises(NoDataError) as ecm:
                d.load()
            self.assertEqual(str(ecm.exception), 'No such bucket "tarantula"')

            # Empty bucket
            # save bucket object for use in index-no-data-file test below
            b = boto.connect_s3().create_bucket("tarantula")
            d = S3DirectorySource(source_url, 1)
            with self.assertRaises(NoDataError) as ecm:
                d.load()
            self.assertEqual(str(ecm.exception),
                             'No index file found at "{0}"'.format(index_url))

            # Index present but with missing data files
            k = Key(b)
            k.name = 'bigandblue/index.json'
            k.set_contents_from_string(index_body)
            d = S3DirectorySource(source_url, 1)
            with self.assertRaises(NoDataError) as ecm:
                d.load()
            self.assertEqual(
                str(ecm.exception), 'Parsing failure: Error parsing '
                '"/bigandblue/index.json": Invalid chunk '
                'filename: "1"')
Example #52
    def test_cache_control(self):
        with mock_s3():
            # Set random max-age for various content types
            with self.settings(
                    BAKERY_CACHE_CONTROL={
                        "application/javascript": random.randint(0, 100000),
                        "text/css": random.randint(0, 100000),
                        "text/html": random.randint(0, 100000),
                    }):
                conn = boto.connect_s3()
                bucket = conn.create_bucket(settings.AWS_BUCKET_NAME)
                call_command("build")
                call_command("publish", no_pooling=True, verbosity=3)
                for key in bucket:
                    key = bucket.get_key(key.name)
                    if key.content_type in settings.BAKERY_CACHE_CONTROL:
                        # key.cache_control returns string
                        # with "max-age=" prefix
                        self.assertIn(
                            str(
                                settings.BAKERY_CACHE_CONTROL.get(
                                    key.content_type)), key.cache_control)
Example #53
    def setUp(self):
        self.mock = mock_s3()
        self.mock.start()

        self.bucket = 'tflens-test-bucket'
        self.valid_content_key = 'tflens/terraform.tfstate'
        self.non_valid_key = 'none'

        self.valid_tfstate_file = "s3://{}/{}".format(self.bucket,
                                                      self.valid_content_key)
        self.non_existing_tfstate_file = "s3://{}/{}".format(
            self.bucket, self.non_valid_key)
        self.non_valid_tfstate_location = "s3:/{}/{}".format(
            self.bucket, self.valid_content_key)

        s3 = boto3.client("s3")
        s3.create_bucket(
            Bucket=self.bucket,
            CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'})
        s3.put_object(Bucket=self.bucket,
                      Key=self.valid_content_key,
                      Body=json.dumps(VALID_TFSTATE_CONTENT_WITH_RESOURCES))
Example #54
    def setUp(self):
        super(TestCloudFrontS3Storage, self).setUp()
        self.s3_mock = mock_s3()
        self.s3_mock.start()
        self.settings = {
            "storage.bucket": "mybucket",
            "storage.aws_access_key_id": "abc",
            "storage.aws_secret_access_key": "bcd",
            "storage.cloud_front_domain": "https://abcdef.cloudfront.net",
            "storage.cloud_front_key_file": "",
            "storage.cloud_front_key_string":
                "-----BEGIN RSA PRIVATE KEY-----\n"
                "MIICXQIBAAKBgQDNBN3WHzIgmOEkBVNdBlTR7iGYyUXDVuFRkJlYp/n1/EZf2YtE\n"
                "BpxJAgqdwej8beWV16QXOnKXQpsGAeu7x2pvOGFyRGytmLDeUXayfIF/E46w83V2\n"
                "r53NOBrlezagqCAz9uafocyNaXlxZfp4tx82sEmpSmHGwd//+n6zgXNC0QIDAQAB\n"
                "AoGAd5EIA1GMPYCLhSNp+82ueARGKcHwYrzviU8ob5D/cVtge5P26YRlbxq2sEyf\n"
                "oWBCTgJGW5mlrNuWZ4mFPq1NP2X2IU80k/J67KOuOelAykIVQw6q6GAjtmh40x4N\n"
                "EekoFzxVqoFKqWOJ1UNP0jNOyfzxU5dfzvw5GOEXob9usjECQQD3++wWCoq+YRCz\n"
                "8qqav2M7leoAnDwmCYKpnugDU0NR61sZADS3kJHnhXAbPFQI4dRfETJOkKE/iDph\n"
                "G0Rtdfm1AkEA06VoI49wjEMYs4ah3qwpvhuVyxVa9iozIEoDYiVCOOBZw8rX79G4\n"
                "+5yzC9ehy9ugWttSA2jigNXVB6ORN3+mLQJBAM47lZizBbXUdZahvp5ZgoZgY65E\n"
                "QIWFrUOxYtS5Hyh2qlk9YZozwhOgVp5f6qdEYGD7pTHPeDqk6aAulBbQYW0CQQC4\n"
                "hAw2dGd64UQ3v7h/mTkLNKFzXDrhQgkwrVYlyrXhQDcCK2X2/rB3LDYsrOGyCNfU\n"
                "XkEyF87g44vGDSQdbnxBAkA1Y+lB/pqdyHMv5RFabkBvU0yQDfekAKHeQ6rS+21g\n"
                "dWedUVc1JNnKtb8W/rMfdjg9YLYqUTvoBvp0DjfwdYc4\n"
                "-----END RSA PRIVATE KEY-----",
            "storage.cloud_front_key_id": "key-id",
        }
        s3 = boto3.resource("s3")
        self.bucket = s3.create_bucket(Bucket="mybucket")
        patch.object(CloudFrontS3Storage, "test", True).start()
        kwargs = CloudFrontS3Storage.configure(self.settings)
        self.storage = CloudFrontS3Storage(MagicMock(), **kwargs)
Example #55
def s3_data_dir():
    """Used for testing filesystem and data_import.

    The bucket is purposely named s3-... as this previously introduced a bug and
    it is often the default naming scheme for buckets in the enterprise clouds.
    """
    mock = mock_s3()
    mock.start()
    import boto3
    import s3fs

    conn = boto3.client("s3")
    conn.create_bucket(Bucket="s3-test-bucket")
    fs = s3fs.S3FileSystem()

    for i in range(1, 11):
        fname = f"202002{i:02d}_data.csv"
        with fs.open(f"s3://s3-test-bucket/dump/{fname}", "wb") as fp:
            fp.write(b"hello")

    yield fs
    mock.stop()
Example #56
def mocked_encrypted_s3_bucket():
    bucket_policy = {
        "Version": "2012-10-17",
        "Id": "PutObjPolicy",
        "Statement": [
            {
                "Sid": "DenyUnEncryptedObjectUploads",
                "Effect": "Deny",
                "Principal": "*",
                "Action": "s3:PutObject",
                "Resource": "arn:aws:s3:::{}/*".format(BUCKET_NAME),
                "Condition": {"Null": {"s3:x-amz-server-side-encryption": "aws:kms"}},
            }
        ],
    }
    bucket_policy = json.dumps(bucket_policy)

    with mock_s3():
        conn = s3fs.core.boto3.client("s3", **AWS_CREDENTIALS)
        conn.create_bucket(Bucket=BUCKET_NAME)
        conn.put_bucket_policy(Bucket=BUCKET_NAME, Policy=bucket_policy)
        yield conn
Example #57
    def setUp(self):
        self.mock = mock_s3()
        self.mock.start()

        #
        # Populate the data in mock S3
        #
        conn = boto.connect_s3()

        # s3+dir lists_served bucket first
        b = conn.create_bucket(self.lists_served_bucket_name)
        for fname in [
                'mozpub-track-digest256.ini', 'testpub-bananas-digest256.ini'
        ]:
            k = Key(b)
            k.name = fname
            with open(
                    os.path.join(os.path.dirname(__file__), 'lists_served_s3',
                                 fname)) as f:
                k.set_contents_from_file(f)

        # s3+file contents
        b = conn.create_bucket(self.bucket_name)
        k = Key(b)
        k.name = self.key_name
        with open(test_file(self.key_name), 'rb') as f:
            k.set_contents_from_file(f)

        # s3+dir keys and contents
        b = conn.create_bucket(self.dir_bucket_name)
        for fname in ('index.json', '1', '2', '3', '4', '5', '6'):
            k = Key(b)
            k.name = posixpath.join(self.dir_list_name, fname)
            with open(test_file(posixpath.join('delta_dir_source', fname)),
                      'rb') as f:
                k.set_contents_from_file(f)

        # initialize the internal list data structure via the normal method
        super(S3SourceListsTest, self).setUp()
Example #58
def s3_buckets():
    with mock_s3():
        boto3.setup_default_session(region_name='eu-west-1')

        client = boto3.client('s3')

        client.create_bucket(Bucket='empty-bucket')

        client.create_bucket(Bucket='busy-bucket')
        for key in [
                'key1',
                'key2',
                'key3',
                'longkey1',
                'longkey2',
                'longkey3_suffix',
                'longkey4_suffix',
                'miscellaneous',
        ]:
            client.put_object(Bucket='busy-bucket', Key=key, Body=b'')

        yield
Example #59
    def setUp(self):
        # Mock S3 is used for all tests
        self.mock = mock_s3()
        self.mock.start()

        # Create a connection to our mock S3 and populate it
        self.s3_client = boto3.client('s3', region_name='us-east-1')
        self.s3_client.create_bucket(Bucket='my_bucket')

        # Make a temporary directory to simulate the server root
        self.temp_dir = tempfile.mkdtemp()

        self.cfg = {
            'server_name': 'my_server',
            'world_name': 'my_world',
            'server_root_dir': self.temp_dir,
            's3_bucket': 'my_bucket'
        }

        # Create the empty backup directory
        self.backup_path = os.path.join(self.temp_dir, 'my_server', 'backups')
        os.makedirs(self.backup_path)
Example #60
    def setUpClass(cls):
        cls.config_local = {"filesystem": "local", "extension": "png"}

        cls.config_s3 = {
            "filesystem": "s3",
            "extension": "png",
            "bucket": "my_bucket"
        }

        # Set up bucket
        cls.mock_s3 = mock_s3()
        cls.mock_s3.start()

        client = boto3.client('s3', region_name="us-east-1")
        _ = client.create_bucket(ACL='private', Bucket=cls.config_s3["bucket"])
        waiter = client.get_waiter('bucket_exists')
        waiter.wait(Bucket=cls.config_s3["bucket"])

        s3 = boto3.resource('s3')
        bucket = s3.Bucket(cls.config_s3["bucket"])

        # Put images in S3
        cls.test_imgs = [
            os.path.join(
                resource_filename("ingestclient", "test/data/example_z_stack"),
                "3253_my_stack_section000.png"),
            os.path.join(
                resource_filename("ingestclient", "test/data/example_z_stack"),
                "3254_my_stack_section001.png")
        ]

        cls.imgs = [
            "example_z_stack/3253_my_stack_section000.png",
            "example_z_stack/3254_my_stack_section001.png"
        ]

        for key, img in zip(cls.imgs, cls.test_imgs):
            # put file
            bucket.upload_file(img, key)