Example #1
def test_user_provided_timeout():

    drain()

    send_message = "hello"
    work_time = 2.0

    qp = SQSPollAccess(test_awsimple_str,
                       visibility_timeout=round(10.0 * work_time),
                       immediate_delete=False,
                       profile_name=test_awsimple_str)
    qp.create_queue()
    qp.send(send_message)
    time.sleep(1.0)
    receive_message = qp.receive_message()
    assert receive_message.message == send_message

    q = SQSAccess(test_awsimple_str, profile_name=test_awsimple_str)
    q.create_queue()
    assert q.receive_message() is None  # make sure the message is now invisible

    if not is_mock():
        receive_message.delete()  # todo: delete() not yet working under mock; fix

    assert q.receive_message() is None
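The SQS timeout tests assume a drain() helper that is not shown in these examples. A minimal sketch of what it presumably does, assuming SQSAccess deletes each message on receipt by default (hypothetical, not awsimple's actual code):

def drain() -> None:
    # consume any leftover messages so each test starts with an empty queue
    q = SQSAccess(test_awsimple_str, profile_name=test_awsimple_str)
    q.create_queue()
    while q.receive_message() is not None:
        pass  # assumes each received message is deleted immediately by default
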
Example #2
def test_s3_big_file_upload(s3_access):
    # test big file upload (e.g. that we don't get a timeout)
    # this is run before the cache tests (hence the function name)

    last_run = 0.0
    big_last_run_file_path = Path("big_last_run.txt")
    big_last_run_file_path.parent.mkdir(exist_ok=True, parents=True)
    if not is_mock():
        try:
            last_run = float(big_last_run_file_path.read_text().strip())
        except FileNotFoundError:
            pass

    # only run once a day max since it takes so long
    if last_run + timedelta(days=1).total_seconds() < time.time():

        big_file_path = Path(temp_dir, big_file_name)
        size = big_file_max_size / 1000  # start with something small
        while size < big_file_max_size:
            size *= 2  # get bigger on each iteration
            size = min(big_file_max_size, size)  # make sure the final iteration is exactly the max size
            with big_file_path.open("w") as f:
                f.truncate(round(size))  # this quickly makes a (sparse) file filled with zeros
            start = time.time()
            s3_access.upload(big_file_path, big_file_name)
            print(f"{time.time() - start},{size:.0f}")

        big_last_run_file_path.write_text(str(time.time()))
    else:
        print(f"last run {time.time() - last_run} seconds ago so not running now")
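The loop above starts at one-thousandth of big_file_max_size, doubles the size on each pass, and clamps the final pass to exactly the max size. A standalone illustration of that schedule (size_schedule is a hypothetical helper for illustration, not part of the tests):

def size_schedule(max_size: float, start_divisor: float = 1000.0) -> list:
    # reproduce the test's progression: double each iteration, ending with
    # exactly one entry at max_size
    sizes = []
    size = max_size / start_divisor
    while size < max_size:
        size = min(max_size, size * 2)
        sizes.append(round(size))
    return sizes

# e.g. size_schedule(1_000_000) -> [2000, 4000, 8000, ..., 512000, 1000000]
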
Example #3
def test_actually_timeout():

    drain()

    send_message = "hello"
    work_time = 5.0

    qp = SQSPollAccess(test_awsimple_str,
                       visibility_timeout=round(0.5 * work_time),
                       immediate_delete=False,
                       profile_name=test_awsimple_str)
    qp.create_queue()
    qp.send(send_message)
    time.sleep(1.0)
    receive_message = qp.receive_message()
    assert receive_message.message == send_message  # got it once

    q = SQSAccess(test_awsimple_str, profile_name=test_awsimple_str)
    assert q.receive_message() is None  # make sure the message is now invisible
    time.sleep(work_time)  # will take "too long", so the message should be available again on the next receive_message

    if not is_mock():
        # todo: not yet working under mock; fix
        assert qp.receive_message().message == send_message
        receive_message.delete()  # now we delete it

    assert q.receive_message() is None
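This test deliberately lets the visibility timeout expire so the message reappears. With plain boto3, a worker that needs more time can instead extend the timeout via change_message_visibility. A sketch, where queue_url and receipt_handle are assumed to come from an earlier boto3 receive call (not part of awsimple's API):

import boto3

def extend_visibility(queue_url: str, receipt_handle: str, seconds: int = 60) -> None:
    # give a slow worker more time before SQS makes the message visible again
    sqs = boto3.client("sqs")
    sqs.change_message_visibility(QueueUrl=queue_url, ReceiptHandle=receipt_handle, VisibilityTimeout=seconds)
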
Example #4
def test_aws_test():

    # test the test() method (basic AWS connection)

    # these should work
    if not is_mock():
        assert AWSAccess(profile_name=test_awsimple_str).test()
    assert S3Access(test_awsimple_str, profile_name=test_awsimple_str).test()
    assert DynamoDBAccess(test_awsimple_str, profile_name=test_awsimple_str).test()
    assert SQSAccess(test_awsimple_str, profile_name=test_awsimple_str).test()

    if not is_mock():
        # this (non-existent) profile doesn't have access at all
        with pytest.raises(ProfileNotFound):
            AWSAccess(profile_name="IAmNotAProfile").test()
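awsimple's test() implementation isn't shown in these examples. A plausible minimal connectivity check with the same true/false contract, using an STS call that fails fast on bad credentials (a sketch, not the library's actual code):

import boto3

def connectivity_check(profile_name: str = None) -> bool:
    # a cheap call that exercises both credentials and network connectivity
    try:
        session = boto3.session.Session(profile_name=profile_name)
        session.client("sts").get_caller_identity()
        return True
    except Exception:
        return False
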
Example #5
def test_get_never_change_metadata(s3_access):

    global never_change_size, never_change_mtime, never_change_etag

    if is_mock():

        # mocking always starts with nothing, so we need to "upload" this file; use boto3 directly so we don't write awsimple's SHA512

        test_file_path = Path(temp_dir, never_change_file_name)
        never_change_file_contents = "modification Aug 21, 2020 at 2:51 PM PT\nnever change this file\n"
        test_file_path.write_text(never_change_file_contents)
        s3_access.client.upload_file(str(test_file_path), test_awsimple_str, never_change_file_name)  # no awsimple SHA512

        keys = [obj["Key"] for obj in s3_access.client.list_objects_v2(Bucket=test_awsimple_str)["Contents"]]
        assert never_change_file_name in keys

        metadata = s3_access.get_s3_object_metadata(never_change_file_name)
        never_change_mtime = metadata.mtime.timestamp()
        never_change_etag = metadata.etag
        never_change_size = metadata.size
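get_s3_object_metadata's internals aren't shown here; the size, mtime, and etag fields it returns can be fetched with a plain boto3 head_object call. A sketch using the same client and bucket as above (illustrative only, not awsimple's implementation):

def head_metadata(client, bucket: str, key: str):
    # illustrative stand-in for get_s3_object_metadata using boto3 directly
    response = client.head_object(Bucket=bucket, Key=key)
    size = response["ContentLength"]    # bytes
    mtime = response["LastModified"]    # timezone-aware datetime
    etag = response["ETag"].strip('"')  # S3 returns the ETag wrapped in quotes
    return size, mtime, etag
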
Example #6
def test_get_region():

    if not is_mock():
        # todo: get this to work with mocking
        region = AWSAccess(profile_name=test_awsimple_str).get_region()
        print(f"{region=}")
        print(f"{len(region)=}")
        assert len(region) >= 5  # make sure we get back something
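get_region's implementation isn't shown; boto3 resolves the region from the profile and config, so a plausible sketch is simply (an assumption, not confirmed by these examples):

def get_region_sketch(session) -> str:
    # boto3 resolves the region from the profile, config file, or environment;
    # may be None if no region is configured
    return session.region_name
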
Example #7
def test_get_account_id():
    if not is_mock():
        # todo: get this to work with mocking
        aws_access = AWSAccess()
        account_id = aws_access.get_account_id()
        assert len(account_id) >= 12  # currently all account IDs are 12 numeric digits, but allow for them to increase in size (still digits only)
        assert account_id.isdigit()
        print(account_id)
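A plausible way get_account_id could work, using STS (a sketch under that assumption; the library's actual code isn't shown):

def get_account_id_sketch(session) -> str:
    # STS reports the 12-digit account ID for the current credentials
    return session.client("sts").get_caller_identity()["Account"]
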
Example #8
def test_get_access_key():

    if not is_mock():
        # todo: get this to work with mocking
        access_key = AWSAccess(profile_name=test_awsimple_str).get_access_key()
        print(f"{access_key=}")
        print(f"{len(access_key)=}")
        # https://docs.aws.amazon.com/IAM/latest/APIReference/API_AccessKey.html
        assert len(access_key) >= 16  # as of this writing, the access key length was 20
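Likewise, get_access_key's implementation isn't shown; boto3 exposes the resolved credentials on the session, so a sketch might be:

def get_access_key_sketch(session) -> str:
    # the resolved credentials include the access key ID checked above
    return session.get_credentials().access_key
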
Example #9
def test_get_table_names():
    if is_mock():
        # for mocking we have to create the table on the fly
        dynamodb_access = DynamoDBAccess(test_awsimple_str, profile_name=test_awsimple_str)
        dynamodb_access.create_table(id_str)
    else:
        # since we're only going to get the existing table names, we don't have to provide a table name
        dynamodb_access = DynamoDBAccess(profile_name=test_awsimple_str)
    dynamodb_tables = dynamodb_access.get_table_names()
    print(dynamodb_tables)
    assert len(dynamodb_tables) > 0
    assert test_awsimple_str in dynamodb_tables
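get_table_names isn't shown; with plain boto3 the equivalent requires pagination, since list_tables returns at most 100 names per call. A sketch (not awsimple's actual implementation):

def list_table_names(dynamodb_client) -> list:
    # paginate because list_tables caps each response at 100 table names
    names = []
    for page in dynamodb_client.get_paginator("list_tables").paginate():
        names.extend(page["TableNames"])
    return names
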
Example #10
    def __init__(
        self,
        resource_name: str = None,
        profile_name: str = None,
        aws_access_key_id: str = None,
        aws_secret_access_key: str = None,
        region_name: str = None,
    ):
        """
        AWSAccess - takes care of basic AWS access (e.g. session, client, resource), getting some basic AWS information, and mock support for testing.

        :param resource_name: AWS resource name (e.g. s3, dynamodb, sqs, sns, etc.). Can be None if just testing the connection.

        Provide either a profile name or an access key ID/secret access key pair.

        :param profile_name: AWS profile name
        :param aws_access_key_id: AWS access key (required if secret_access_key given)
        :param aws_secret_access_key: AWS secret access key (required if access_key_id given)
        :param region_name: AWS region (may be optional - see AWS docs)
        """

        import boto3  # import here to facilitate mocking

        self.resource_name = resource_name
        self.profile_name = profile_name
        self.aws_access_key_id = aws_access_key_id
        self.aws_secret_access_key = aws_secret_access_key
        self.region_name = region_name

        self._moto_mock = None
        self._aws_keys_save = {}

        # use keys in AWS config
        # https://docs.aws.amazon.com/cli/latest/userguide/cli-config-files.html
        kwargs = {}
        for k in ["profile_name", "aws_access_key_id", "aws_secret_access_key", "region_name"]:
            if getattr(self, k) is not None:
                kwargs[k] = getattr(self, k)
        self.session = boto3.session.Session(**kwargs)

        if is_mock():

            # moto mock AWS
            for aws_key in ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SECURITY_TOKEN", "AWS_SESSION_TOKEN"]:
                self._aws_keys_save[aws_key] = os.environ.get(aws_key)  # will be None if not set
                os.environ[aws_key] = "testing"

            if self.resource_name == "s3":
                from moto import mock_s3 as moto_mock
            elif self.resource_name == "sns":
                from moto import mock_sns as moto_mock
            elif self.resource_name == "sqs":
                from moto import mock_sqs as moto_mock
            elif self.resource_name == "dynamodb":
                from moto import mock_dynamodb2 as moto_mock
            else:
                from moto import mock_iam as moto_mock

            self._moto_mock = moto_mock()
            self._moto_mock.start()
            region = "us-east-1"
            self.resource = boto3.resource(self.resource_name, region_name=region)  # type: ignore
            self.client = boto3.client(self.resource_name, region_name=region)  # type: ignore
            if self.resource_name == "s3":
                self.resource.create_bucket(Bucket="testawsimple")  # todo: put this in the test code

        elif self.resource_name is None:
            # just the session, but not the client or resource
            self.client = None
            self.resource = None
        else:
            # real AWS (no mock)
            self.client = self.session.client(self.resource_name, config=self._get_config())  # type: ignore
            self.resource = self.session.resource(self.resource_name, config=self._get_config())  # type: ignore
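A short usage sketch for the constructor above (profile and key values are placeholders, not real credentials):

# with a named profile from ~/.aws/credentials
s3_aws = AWSAccess("s3", profile_name="my-profile")

# or with an explicit key pair instead of a profile
s3_aws_keys = AWSAccess("s3", aws_access_key_id="AKIA...", aws_secret_access_key="...")

# resource_name=None creates the session only; client and resource stay None
session_only = AWSAccess(profile_name="my-profile")
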
Example #11
def test_mock():
    s3_access = S3Access(test_awsimple_str)
    assert is_mock() == s3_access.is_mocked()  # make sure the AWSAccess instance is actually using mocking
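is_mock()'s implementation isn't shown; the test setup below sets the use_moto_mock_env_var environment variable to "1" to enable mocking, so a plausible sketch is (an assumption, not necessarily awsimple's exact code):

import os
from awsimple import use_moto_mock_env_var

def is_mock_sketch() -> bool:
    # mock mode is driven by an environment variable the test setup sets to "1"
    return os.environ.get(use_moto_mock_env_var) == "1"
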
Example #12
import os
from pathlib import Path

from balsa import Balsa

from awsimple import __application_name__, __author__, is_mock, use_moto_mock_env_var, S3Access

from test_awsimple import test_awsimple_str, temp_dir, cache_dir

mock_env_var = os.environ.get(use_moto_mock_env_var)

if mock_env_var is None:
    os.environ[use_moto_mock_env_var] = "1"

# if using non-local pytest, create the credentials and config files dynamically
aws_credentials_and_config_dir = Path(Path.home(), ".aws")
aws_credentials_file = Path(aws_credentials_and_config_dir, "credentials")
aws_config_file = Path(aws_credentials_and_config_dir, "config")
if is_mock():
    if not aws_credentials_and_config_dir.exists():
        aws_credentials_and_config_dir.mkdir(parents=True, exist_ok=True)
    if not aws_credentials_file.exists():
        credential_strings = [
            "[default]\naws_access_key_id=AAAAAAAAAAAAAAAAAAAA\naws_secret_access_key=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
            f"[{test_awsimple_str}]\naws_access_key_id=AAAAAAAAAAAAAAAAAAAA\naws_secret_access_key=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
        ]
        aws_credentials_file.write_text("\n".join(credential_strings))
    if not aws_config_file.exists():
        config_strings = [
            "[profile default]\nregion=us-west-2",
            f"[profile {test_awsimple_str}]\nregion=us-west-2"
        ]
        aws_config_file.write_text("\n".join(config_strings))
Example #13
def test_s3_multiple_transfers(s3_access):

    s3_paths = {}
    rmtree(temp_dir)
    for test_string in ["a", "b"]:
        s3_paths[test_string] = {}
        for mode in ["in", "out"]:
            p = Path(temp_dir, mode, f"{test_string}.txt")
            p.parent.mkdir(parents=True, exist_ok=True)
            if mode == "in":
                with p.open("w") as f:
                    f.write(test_string)
            s3_paths[test_string][mode] = p

    if is_mock():
        with pytest.raises(AWSimpleException):
            s3_access.download_cached("a", s3_paths["a"]["out"])  # won't exist at first if mocked

    # upload and download file
    s3_access.upload(s3_paths["a"]["in"], "a")
    download_status = s3_access.download_cached("a", s3_paths["a"]["out"])
    assert download_status.success
    assert not download_status.cache_hit
    assert download_status.cache_write
    check_file_contents(s3_paths["a"]["out"], "a")

    # upload a different file to the same key and check that we get the contents of the new file
    s3_access.upload(s3_paths["b"]["in"], "a")
    download_status = s3_access.download_cached("a", s3_paths["a"]["out"])
    assert download_status.success
    assert not download_status.cache_hit
    assert download_status.cache_write
    check_file_contents(s3_paths["a"]["out"], "b")

    # cached download
    download_status = s3_access.download_cached("a", s3_paths["a"]["out"])
    assert download_status.success
    assert download_status.cache_hit
    assert not download_status.cache_write
    check_file_contents(s3_paths["a"]["out"], "b")

    # put "a" back and just use regular download (not cached)
    s3_access.upload(s3_paths["a"]["in"], "a")
    assert s3_access.download("a", s3_paths["a"]["out"])
    check_file_contents(s3_paths["a"]["out"], "a")

    # write something else to that key
    s3_access.write_string("c", "a")
    assert s3_access.read_string("a") == "c"

    # now upload and download an object
    test_dict = {"z": 3}
    s3_access.upload_object_as_json(test_dict, "a")
    downloaded_dict = s3_access.download_object_as_json("a")
    assert test_dict == downloaded_dict
    downloaded_dict = s3_access.download_object_as_json_cached("a")
    assert test_dict == downloaded_dict

    assert len(list(cache_dir.glob("*"))) == 3  # there should be 3 entries in the cache at this point