예제 #1
0
def test_getBucketSize():
    """
    Scenario getBucketSize.2 - Public read enabled
        Expected: The flaws.cloud bucket returns size: 24.9 KiB
    Scenario getBucketSize.3 - Public read disabled
        Expected: app-dev bucket has public read permissions disabled
    Scenario getBucketSize.4 - Bucket doesn't exist
        Expected: We should get back "NoSuchBucket"
    """
    test_setup()

    # getBucketSize.2 - publicly listable bucket reports its human-readable total size
    assert s3.getBucketSize('flaws.cloud') == "24.9 KiB"

    # getBucketSize.3 - listing is denied, so the size cannot be computed
    assert s3.getBucketSize('app-dev') == "AccessDenied"

    # getBucketSize.4 - bucket name that should not exist anywhere
    assert s3.getBucketSize('thiswillprobablynotexistihope') == "NoSuchBucket"
예제 #2
0
def test_getBucketSize():
    """
    Scenario 1: Bucket doesn't exist
        Expected: 255

    Scenario 2: Bucket exists, listing open to public
        Expected:
            Size: 9.1 KiB
        Note:
            Using flaws.cloud as example by permission of owner (@0xdabbad00)

    """

    # Scenario 1 - a non-existent bucket must raise sh.ErrorReturnCode_255.
    # The previous version silently passed when no exception was raised
    # (the except branch only ran `assert True`); track the outcome
    # explicitly so a missing exception now fails the test.
    raised = False
    try:
        s3.getBucketSize('example-this-hopefully-wont-exist-123123123')
    except sh.ErrorReturnCode_255:
        raised = True
    assert raised

    # Scenario 2 - publicly listable bucket reports its human-readable size
    assert s3.getBucketSize('flaws.cloud') == "9.1 KiB"
예제 #3
0
def test_getBucketSize():
    """
    Scenario getBucketSize.1 - Public read enabled
        Expected: The s3scanner-bucketsize bucket returns size: 43 bytes
    Scenario getBucketSize.2 - Public read disabled
        Expected: app-dev bucket has public read permissions disabled
    Scenario getBucketSize.3 - Bucket doesn't exist
        Expected: We should get back "NoSuchBucket"
    Scenario getBucketSize.4 - Public read enabled, more than 1,000 objects
        Expected: The s3scanner-long bucket returns size: 4000 bytes
    """
    test_setup()

    # getBucketSize.1 - small publicly listable bucket
    assert s3.getBucketSize('s3scanner-bucketsize') == "43 bytes"

    # getBucketSize.2 - listing is denied, so the size cannot be computed
    assert s3.getBucketSize('app-dev') == "AccessDenied"

    # getBucketSize.3 - bucket name that should not exist anywhere
    assert s3.getBucketSize('thiswillprobablynotexistihope') == "NoSuchBucket"

    # getBucketSize.4 - bucket large enough to require paginated (>1,000 key) listing
    assert s3.getBucketSize('s3scanner-long') == "4000 bytes"
예제 #4
0
def test_getBucketSizeTimeout():
    """
    Verify that getBucketSize() times out after X amount of seconds on buckets with many files.

    Expected:
        Use e27.co to test with. Verify that getBucketSize returns an unknown size and doesn't take longer
        than sizeCheckTimeout set in s3utils
    """

    startTime = time.time()

    output = s3.getBucketSize("e27.co")
    duration = time.time() - startTime

    # Assert that getting the bucket size took less than or equal to the alloted time plus 1 second to account
    # for processing time.
    assert duration <= s3.sizeCheckTimeout + 1
    assert output == "Unknown Size"
예제 #5
0
def test_getBucketSizeTimeout():
    """
    Scenario getBucketSize.1 - Too many files to list so it times out
        Expected: The function returns a timeout error after the specified wait time
        Note: Uses e27.co as the test bucket. Confirms getBucketSize reports an
        unknown size and finishes within sizeCheckTimeout (set in s3utils).
    """
    test_setup()

    # Force the unauthenticated code path so the listing is slow enough to time out.
    s3.awsCredsConfigured = False

    begin = time.time()
    result = s3.getBucketSize("e27.co")
    elapsed = time.time() - begin

    # The call must complete within the configured timeout; grant one extra
    # second of slack for processing overhead.
    assert elapsed <= s3.sizeCheckTimeout + 1
    assert result == "Unknown Size - timeout"
예제 #6
0
def test_getBucketSizeTimeout():
    """
    Scenario getBucketSize.1 - Too many files to list so it times out
        Expected: The function returns a timeout error after the specified wait time
        Note: Uses the s3scanner-long bucket. Confirms getBucketSize reports an
        unknown size and finishes within SIZE_CHECK_TIMEOUT (set in s3utils).
    """
    test_setup()

    # Force the unauthenticated code path, and shrink the timeout so even a
    # fast connection cannot finish listing the bucket in time.
    s3.AWS_CREDS_CONFIGURED = False
    s3.SIZE_CHECK_TIMEOUT = 2  # In case we have a fast connection

    begin = time.time()
    result = s3.getBucketSize("s3scanner-long")
    elapsed = time.time() - begin

    # The call must complete within the configured timeout; grant one extra
    # second of slack for processing overhead.
    assert elapsed <= s3.SIZE_CHECK_TIMEOUT + 1
    assert result == "Unknown Size - timeout"
예제 #7
0
def test_getBucketSize():
    """
    Scenario getBucketSize.2 - Public read enabled
        Expected: The flaws.cloud bucket returns size: 9.1KiB
    Scenario getBucketSize.3 - Public read disabled
    Scenario getBucketSize.4 - Bucket doesn't exist

    NOTE(review): scenarios .3 and .4 are not implemented yet; this test
    deliberately raises NotImplementedError until they are written.
    """
    test_setup()

    # getBucketSize.2 - publicly listable bucket reports its human-readable size
    assert s3.getBucketSize('flaws.cloud') == "9.1 KiB"

    # getBucketSize.3 - TODO: assert behavior for a bucket with public read disabled

    # getBucketSize.4 - TODO: assert behavior for a non-existent bucket.
    # An earlier draft expected an sh.ErrorReturnCode_255 exception:
    # try:
    #     s3.getBucketSize('example-this-hopefully-wont-exist-123123123')
    # except sh.ErrorReturnCode_255:
    #     assert True

    raise NotImplementedError
예제 #8
0
        # Skip names that fail S3 bucket-naming validation before any network calls.
        valid = s3.checkBucketName(bucket)

        if not valid:
            message = "{0:>11} : {1}".format("[invalid]", bucket)
            slog.error(message)
            continue

        # With AWS credentials we can query the bucket's ACLs directly; without
        # them, fall back to an unauthenticated existence check and mark the
        # ACLs as unknown.
        if s3.awsCredsConfigured:
            b = s3.checkAcl(bucket)
        else:
            a = s3.checkBucketWithoutCreds(bucket)
            b = {"found": a, "acls": "unknown - no aws creds"}

        if b["found"]:

            size = s3.getBucketSize(
                bucket)  # Try to get the size of the bucket

            # Report the bucket, its size, and its ACLs in one aligned log line.
            message = "{0:>11} : {1}".format(
                "[found]",
                bucket + " | " + size + " | ACLs: " + str(b["acls"]))
            slog.info(message)
            flog.debug(bucket)  # NOTE(review): flog appears to log only found bucket names — confirm intent

            if args.dump:
                s3.dumpBucket(bucket)
            if args.list:
                # Only attempt a listing when the ACL check did not deny access.
                if str(b["acls"]) not in ["AccessDenied", "AllAccessDisabled"]:
                    s3.listBucket(bucket)
        else:
            message = "{0:>11} : {1}".format("[not found]", bucket)
            slog.error(message)