Пример #1
0
def test_checkBucketInvalidName():
    """
    Verify that checkBucket() rejects invalid bucket names with code 999.

    Covers three invalid cases:
        1. Name shorter than the 3-character minimum (2 characters)
        2. Name longer than the allowed maximum (80 characters)
        3. Name containing a character that is not permitted (':')
    """
    test_setup()

    # Case 1: name is only 2 characters long
    assert s3.checkBucket('ab', 'us-west-1')[0] == 999

    # Case 2: name is 80 characters long
    oversizedName = "asdfasdf12834092834nMSdfnasjdfhu23y49u2y4jsdkfjbasdfbasdmn4asfasdf23423423423423"  # 80 characters
    assert s3.checkBucket(oversizedName, 'us-east-1')[0] == 999

    # Case 3: ':' is not a legal bucket-name character
    invalidCharsName = "mycoolbucket:dev"
    assert s3.checkBucket(invalidCharsName, 'us-west-2')[0] == 999
Пример #2
0
def test_checkBucket():
    """
    Exercise checkBucket() against live buckets.

    Case 1 - existing bucket queried with the wrong region:
        Amazon replies 301 to redirect toward the nearest S3 endpoint. The
        exact region returned depends on where the test runs, so only assert
        that the returned region string contains two hyphens. The query
        deliberately uses ap-south-1 (Asia Pacific - Mumbai); if running the
        test near there, switch to a region far from
        you - https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region

    Case 2 - existing bucket queried with the correct region:
        Expect code 200 plus the domain name and region. flaws.cloud is used
        as the example by permission of its owner (@0xdabbad00).
    """
    # Case 1: wrong region should produce a 301 redirect
    outcome = s3.checkBucket('amazon.com', 'ap-south-1')
    assert outcome[0] == 301
    assert outcome[1].count("-") == 2

    # Case 2: correct region should echo back the name and region with a 200
    outcome = s3.checkBucket('flaws.cloud', 'us-west-2')
    assert outcome[0] == 200
    assert outcome[1] == 'flaws.cloud'
    assert outcome[2] == 'us-west-2'
Пример #3
0
    def ScanS3Bucket(self):
        """
        Scan every bucket name listed in the *.txt files under outputDir.

        For each input file a dedicated file handler is pointed at the
        corresponding output path and each line (one bucket/domain per line)
        is passed to s3.checkBucket(). If AWS credentials are not configured,
        scanning is skipped and a warning is shown instead.
        """
        files = glob.glob(self.globalVariables.outputDir + "*.txt")

        # BUG FIX: the screen logger must exist before the credential warning
        # below can use it; previously `slog` was only created inside the
        # per-file loop, after its first use.
        slog = logging.getLogger('s3scanner-screen')
        slog.setLevel(logging.INFO)

        levelStyles = {
            'info': {
                'color': 'blue'
            },
            'warning': {
                'color': 'yellow'
            },
            'error': {
                'color': 'red'
            }
        }

        fieldStyles = {'asctime': {'color': 'white'}}

        # Use coloredlogs to add color to screen logger. Define format and
        # styles. This setup is loop-invariant, so install it once up front.
        coloredlogs.install(level='DEBUG',
                            logger=slog,
                            fmt='%(asctime)s   %(message)s',
                            level_styles=levelStyles,
                            field_styles=fieldStyles)

        if not s3.checkAwsCreds():
            s3.awsCredsConfigured = False
            slog.error(
                "Warning: AWS credentials not configured. Open buckets will be shown as closed. Run: `aws configure` to fix this.\n"
            )
        else:
            # File logger is shared; only its handler changes per input file.
            flog = logging.getLogger('s3scanner-file')
            flog.setLevel(logging.DEBUG)  # Set log level for logger object

            for file in files:
                # Output path: s3Bucket prefix + the input file's basename
                # (the negative slice keeps everything after the last '/').
                s3Bucket = self.globalVariables.s3Bucket + (
                    file[file.rfind("/") - len(file) + 1:])

                # Create file handler which logs even debug messages
                fh = logging.FileHandler(s3Bucket)
                fh.setLevel(logging.DEBUG)
                flog.addHandler(fh)

                try:
                    with open(file, "r") as f:
                        for line in f:
                            domain = line.split('\n')
                            s3.checkBucket(domain[0], slog, flog, True, True)
                finally:
                    # BUG FIX: the handler used to stay attached forever, so
                    # each new file's results were also written to every
                    # previously opened log file.
                    flog.removeHandler(fh)
                    fh.close()
Пример #4
0
def test_checkBucket():
    """
    Verify checkBucket() resolves each accepted input format to the bucket name:

        1. Domain name   - flaws.cloud
        2. Full S3 url   - flaws.cloud.s3-us-west-2.amazonaws.com
        3. bucket:region - flaws.cloud:us-west-2

    Each call logs the resolved bucket name through the file logger; the log
    file is then read back and every line must be the bare bucket name.
    """

    test_setup()

    testFile = './test/test_checkBucket.txt'

    # Create file logger
    flog = logging.getLogger('s3scanner-file')
    flog.setLevel(logging.DEBUG)  # Set log level for logger object

    # Create file handler which logs even debug messages
    fh = logging.FileHandler(testFile)
    fh.setLevel(logging.DEBUG)

    # Add the handler to logger
    flog.addHandler(fh)

    # Create secondary logger for logging to screen (silenced for the test)
    slog = logging.getLogger('s3scanner-screen')
    slog.setLevel(logging.CRITICAL)

    try:
        # Domain name
        s3.checkBucket("flaws.cloud", slog, flog, False, False)

        # Full S3 url
        s3.checkBucket("flaws.cloud.s3-us-west-2.amazonaws.com", slog, flog,
                       False, False)

        # bucket:region
        s3.checkBucket("flaws.cloud:us-west-2", slog, flog, False, False)

        # Read in test logging file and assert
        with open(testFile, 'r') as f:
            results = f.readlines()

        assert results[0].rstrip() == "flaws.cloud"
        assert results[1].rstrip() == "flaws.cloud"
        assert results[2].rstrip() == "flaws.cloud"

    finally:
        # BUG FIX: detach and close the handler before deleting the file.
        # Leaving it attached made repeat runs write duplicate lines, and the
        # still-open handle makes os.remove() fail on Windows.
        flog.removeHandler(fh)
        fh.close()

        # Delete test file
        os.remove(testFile)
Пример #5
0
        # Accepted input formats for a target line:
        #   bucket name   i.e. mybucket
        #   domain name   i.e. flaws.cloud
        #   full S3 url   i.e. flaws.cloud.s3-us-west-2.amazonaws.com
        #   bucket:region i.e. flaws.cloud:us-west-2

        if ".amazonaws.com" in line:  # We were given a full s3 url
            # Bucket is everything before ".s3"; the region sits between
            # ".s3-" (hence the +4 skip) and ".amazonaws.com".
            bucket = line[:line.rfind(".s3")]
            region = line[len(line[:line.rfind(".s3")]) +
                          4:line.rfind(".amazonaws.com")]
        elif ":" in line:  # We were given a bucket in 'bucket:region' format
            region = line.split(":")[1]
            bucket = line.split(":")[0]
        else:  # We were either given a bucket name or domain name
            # NOTE(review): this branch does not set `region`; presumably a
            # default region is assigned earlier, outside this excerpt —
            # verify, otherwise the call below raises NameError.
            bucket = line

        result = s3.checkBucket(bucket, region)

        # 301 means the bucket exists in a different region; the response
        # carries the correct region, so retry there.
        if result[0] == 301:
            result = s3.checkBucket(bucket, result[1])

        if result[0] in [900, 404]:  # These are our 'bucket not found' codes
            slog.error(result[1])

        elif result[
                0] == 403:  # Found but closed bucket. Only log if user says to.
            message = "{0:>15} : {1}".format("[found] [closed]",
                                             result[1] + ":" + result[2])
            slog.warning(message)
            if args.includeClosed:  # If user supplied '--include-closed' flag, log this bucket to file
                flog.debug(result[1] + ":" + result[2])
    'error': {
        'color': 'red'
    }
}

fieldStyles = {'asctime': {'color': 'white'}}

# Use coloredlogs to add color to screen logger. Define format and styles.
coloredlogs.install(level='DEBUG',
                    logger=slog,
                    fmt='%(asctime)s   %(message)s',
                    level_styles=levelStyles,
                    field_styles=fieldStyles)

# Without credentials every open bucket will look closed, so warn up front
# but keep scanning anyway.
if not s3.checkAwsCreds():
    s3.awsCredsConfigured = False
    slog.error(
        "Warning: AWS credentials not configured. Open buckets will be shown as closed. Run:"
        " `aws configure` to fix this.\n")

# args.buckets is either a file of bucket names (one per line) or a single
# bucket name; each name is expanded via the wordlist before being checked.
if os.path.isfile(args.buckets):
    with open(args.buckets, 'r') as f:
        for line in f:
            line = line.rstrip()  # Remove any extra whitespace
            for l in newBucketNames(line, args.wordlist, args.isSubdomain):
                s3.checkBucket(l, slog, flog, args.dump, args.list)
else:
    # It's a single bucket
    for l in newBucketNames(args.buckets, args.wordlist, args.isSubdomain):
        s3.checkBucket(l, slog, flog, args.dump, args.list)
Пример #7
0
        'color': 'yellow'
    },
    'error': {
        'color': 'red'
    }
}

fieldStyles = {'asctime': {'color': 'white'}}

# Use coloredlogs to add color to screen logger. Define format and styles.
coloredlogs.install(level='DEBUG',
                    logger=slog,
                    fmt='%(asctime)s   %(message)s',
                    level_styles=levelStyles,
                    field_styles=fieldStyles)

# Without credentials every open bucket will look closed, so warn up front
# but keep scanning anyway.
if not s3.checkAwsCreds():
    s3.AWS_CREDS_CONFIGURED = False
    slog.error(
        "Warning: AWS credentials not configured. Open buckets will be shown as closed. Run:"
        " `aws configure` to fix this.\n")

# args.buckets is either a file of bucket names (one per line) or a single
# bucket name.
if path.isfile(args.buckets):
    with open(args.buckets, 'r') as f:
        for line in f:
            line = line.rstrip()  # Remove any extra whitespace
            s3.checkBucket(line, slog, flog, args.dump, args.list)
else:
    # It's a single bucket
    s3.checkBucket(args.buckets, slog, flog, args.dump, args.list)
Пример #8
0
    }
}

fieldStyles = {'asctime': {'color': 'white'}}

# Use coloredlogs to add color to screen logger. Define format and styles.
coloredlogs.install(level='DEBUG',
                    logger=slog,
                    fmt='%(asctime)s   %(message)s',
                    level_styles=levelStyles,
                    field_styles=fieldStyles)

# Check every domain listed in the input file against the default region.
with open(args.domains, 'r') as f:
    for line in f:
        site = line.rstrip()  # Remove any extra whitespace
        result = s3.checkBucket(site, args.defaultRegion)

        # 301 means the bucket exists in a different region; the response
        # carries the correct region, so retry there.
        if result[0] == 301:
            result = s3.checkBucket(site, result[1])

        if result[0] in [900, 404]:  # These are our 'bucket not found' codes
            slog.error(result[1])

        elif result[
                0] == 403:  # Found but closed bucket. Only log if user says to.
            message = "{0:>15} : {1}".format("[found] [closed]",
                                             result[1] + ":" + result[2])
            slog.warning(message)
            if args.includeClosed:  # If user supplied '--include-closed' flag, log this bucket to file
                flog.debug(result[1] + ":" + result[2])