Example #1
def test_dumpBucket():
    """
        Verify the dumpBucket() function is working as intended.

        Expected: Supplying the function with the arguments ("flaws.cloud", "us-west-2") should result in 6 files
                being downloaded into the buckets folder. The expected file sizes of each file are listed in the
                'expectedFiles' dictionary.
    """

    # Dump the flaws.cloud bucket
    s3.dumpBucket("flaws.cloud", "us-west-2")

    # Folder to look for the files in
    dumpDir = './buckets/flaws.cloud/'

    # Expected sizes of each file
    expectedFiles = {
        'hint1.html': 2575,
        'hint2.html': 1707,
        'hint3.html': 1101,
        'index.html': 2877,
        'robots.txt': 46,
        'secret-dd02c7c.html': 1051
    }

    try:
        # Assert number of files in the folder
        assert len(os.listdir(dumpDir)) == len(expectedFiles)

        # For each file, assert the size
        for file, size in expectedFiles.items():
            assert os.path.getsize(dumpDir + file) == size
    finally:
        # No matter what happens with the asserts, cleanup after the test by deleting the flaws.cloud directory
        shutil.rmtree(dumpDir)
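
The snippet above references os, shutil and the project's s3 module without showing the imports. A minimal header that would make it runnable on its own (the import path of the s3 module is an assumption and may differ in the actual project layout):

import os
import shutil

import s3  # assumption: the project's S3 helper module exposing dumpBucket()

The test_ prefix and the bare assert statements suggest the function is meant to be collected and run by pytest (for example: pytest -k test_dumpBucket); the same imports apply to the later examples as well.
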
Example #2
def test_dumpBucket():
    """
    Scenario dumpBucket.1 - Public read permission enabled
        Expected: Dumping the bucket "flaws.cloud" should result in 7 files being downloaded into the buckets folder.
                    The expected file sizes of each file are listed in the 'expectedFiles' dictionary.
    Scenario dumpBucket.2 - Public read objects disabled
        Expected: The function returns False and the bucket directory doesn't exist.
    Scenario dumpBucket.3 - Authenticated users read enabled, public users read disabled
        Expected: The function returns True and the bucket directory exists; the opposite holds if no AWS creds are set.
    """
    test_setup()

    # dumpBucket.1

    s3.dumpBucket("flaws.cloud")

    dumpDir = './buckets/flaws.cloud/'  # Folder to look for the files in

    # Expected sizes of each file
    expectedFiles = {
        'hint1.html': 2575,
        'hint2.html': 1707,
        'hint3.html': 1101,
        'index.html': 3082,
        'robots.txt': 46,
        'secret-dd02c7c.html': 1051,
        'logo.png': 15979
    }

    try:
        # Assert number of files in the folder
        assert len(os.listdir(dumpDir)) == len(expectedFiles)

        # For each file, assert the size
        for file, size in expectedFiles.items():
            assert os.path.getsize(dumpDir + file) == size
    finally:
        # No matter what happens with the asserts, cleanup after the test by deleting the flaws.cloud directory
        shutil.rmtree(dumpDir)

    # dumpBucket.2
    assert s3.dumpBucket('app-dev') is False
    assert os.path.exists('./buckets/app-dev') is False

    # dumpBucket.3
    # Both asserts should follow whether or not creds are set
    assert s3.dumpBucket('1904') is s3.awsCredsConfigured
    assert os.path.exists('./buckets/1904') is s3.awsCredsConfigured
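
test_setup() is called before the scenarios but is not shown in this listing. A plausible sketch of such a helper, purely an assumption about what it does, would simply make sure the download folder used by the scenarios exists:

import os

def test_setup():
    # Assumption: ensure a ./buckets/ working directory exists before dumping anything into it
    os.makedirs('./buckets/', exist_ok=True)
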
Example #3
def test_dumpBucket():
    """
    Scenario dumpBucket.1 - Public read permission enabled
        Expected: Supplying the function with the argument "flaws.cloud" should result in 6 files
                being downloaded into the buckets folder. The expected file sizes of each file are listed in the
                'expectedFiles' dictionary.
    Scenario dumpBucket.2 - Public read permission disabled
    Scenario dumpBucket.3 - Authenticated users read enabled, public users read disabled
    """
    test_setup()

    # dumpBucket.1

    s3.dumpBucket("flaws.cloud")

    dumpDir = './buckets/flaws.cloud/'  # Folder to look for the files in

    # Expected sizes of each file
    expectedFiles = {
        'hint1.html': 2575,
        'hint2.html': 1707,
        'hint3.html': 1101,
        'index.html': 2877,
        'robots.txt': 46,
        'secret-dd02c7c.html': 1051
    }

    try:
        # Assert number of files in the folder
        assert len(os.listdir(dumpDir)) == len(expectedFiles)

        # For each file, assert the size
        for file, size in expectedFiles.items():
            assert os.path.getsize(dumpDir + file) == size
    finally:
        # No matter what happens with the asserts, cleanup after the test by deleting the flaws.cloud directory
        shutil.rmtree(dumpDir)

    # dumpBucket.2
    # Possibly split into multiple functions

    # dumpBucket.3

    raise NotImplementedError
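
Example #2 above already shows how the two missing scenarios can be covered; a sketch of the body that would replace the NotImplementedError, reusing the bucket names from that example (the specific buckets are assumptions here, any buckets with the matching permissions would do):

    # dumpBucket.2 - public read disabled: the dump fails and no directory is created
    assert s3.dumpBucket('app-dev') is False
    assert os.path.exists('./buckets/app-dev') is False

    # dumpBucket.3 - authenticated users read enabled, public read disabled:
    # both asserts should track whether AWS creds are configured
    assert s3.dumpBucket('1904') is s3.awsCredsConfigured
    assert os.path.exists('./buckets/1904') is s3.awsCredsConfigured
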
Example #4
            bucket = line.split(":")[0]
        else:  # We were either given a bucket name or domain name
            bucket = line

        result = s3.checkBucket(bucket, region)

        if result[0] == 301:
            result = s3.checkBucket(bucket, result[1])

        if result[0] in [900, 404]:  # These are our 'bucket not found' codes
            slog.error(result[1])

        elif result[0] == 403:  # Found but closed bucket. Only log if user says to.
            message = "{0:>15} : {1}".format("[found] [closed]",
                                             result[1] + ":" + result[2])
            slog.warning(message)
            if args.includeClosed:  # If user supplied '--include-closed' flag, log this bucket to file
                flog.debug(result[1] + ":" + result[2])

        elif result[0] == 200:  # The only 'bucket found and open' codes
            message = "{0:<7}{1:>9} : {2}".format(
                "[found]", "[open]",
                result[1] + ":" + result[2] + " - " + result[3])
            slog.info(message)
            flog.debug(result[1] + ":" + result[2])
            if args.dump:
                s3.dumpBucket(bucket, result[2])
        else:
            raise ValueError("Got back unknown code from checkBucket()")
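
This fragment only shows how the return value of s3.checkBucket() is dispatched, but the shape it expects can be read off the branches: result[0] is a status code (301 redirect, 900/404 not found, 403 found but closed, 200 found and open), result[1] is the retry region on a 301 and otherwise a bucket name or message, result[2] is the region used in the "bucket:region" log lines, and result[3] only appears for open buckets. A hypothetical stand-in (not the real implementation) that mirrors that contract for experimenting with the dispatch logic:

def fake_checkBucket(bucket, region):
    # Hypothetical stub mirroring the tuple shape the dispatch code above consumes
    if bucket == "flaws.cloud":
        return (200, bucket, "us-west-2", "public listing enabled")  # found and open
    return (404, "Bucket not found: " + bucket, region)  # one of the 'not found' codes
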
Example #5
        if not valid:
            message = "{0:>11} : {1}".format("[invalid]", bucket)
            slog.error(message)
            continue

        if s3.awsCredsConfigured:
            b = s3.checkAcl(bucket)
        else:
            a = s3.checkBucketWithoutCreds(bucket)
            b = {"found": a, "acls": "unknown - no aws creds"}

        if b["found"]:

            size = s3.getBucketSize(bucket)  # Try to get the size of the bucket

            message = "{0:>11} : {1}".format(
                "[found]",
                bucket + " | " + size + " | ACLs: " + str(b["acls"]))
            slog.info(message)
            flog.debug(bucket)

            if args.dump:
                s3.dumpBucket(bucket)
            if args.list:
                if str(b["acls"]) not in ["AccessDenied", "AllAccessDisabled"]:
                    s3.listBucket(bucket)
        else:
            message = "{0:>11} : {1}".format("[not found]", bucket)
            slog.error(message)
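
In this variant the access check is read as a small dict rather than a tuple. From the snippet alone one can infer that s3.checkAcl() returns something with "found" and "acls" keys (where "acls" may also be an error string such as "AccessDenied"), that s3.checkBucketWithoutCreds() returns a bare boolean which is then wrapped into the same shape, and that s3.getBucketSize() returns a string, since it is concatenated straight into the log message. Hypothetical values matching what the branch above consumes (the exact ACL structure is an assumption):

# With creds: a checkAcl()-style result - the ACL structure itself is an assumption
b_with_creds = {"found": True, "acls": {"allUsers": ["READ"], "authUsers": []}}

# Without creds: the wrapped result of checkBucketWithoutCreds()
b_without_creds = {"found": True, "acls": "unknown - no aws creds"}
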