Example no. 1
0
def collect_bucket(account: AWSAccount, bucket_name):
    """Record metadata for every object in one S3 bucket into the database.

    Iterates all objects of *bucket_name* in *account* and inserts a
    BucketObject row (account, bucket, key, size, mtime) for each one,
    committing once at the end. Unless --aws-s3-skip-checks is set, objects
    that already have a row are skipped.
    """
    aws = aws_session(account.id, account.role)
    s3_resource = aws.resource("s3")
    db = Session()

    log.info(
        f"Collecting all objects in AWS S3 bucket {bucket_name} in {account.rtdname}"
    )
    for obj in s3_resource.Bucket(bucket_name).objects.all():
        # TODO: this could be highly optimized via batching
        if not ArgumentParser.args.aws_s3_skip_checks:
            # Skip objects we have already recorded for this account/bucket.
            existing = (
                db.query(BucketObject)
                .filter_by(
                    account=account.id,
                    bucket_name=bucket_name,
                    name=obj.key,
                )
                .scalar()
            )
            if existing is not None:
                continue

        db.add(
            BucketObject(
                account=account.id,
                bucket_name=bucket_name,
                name=obj.key,
                size=obj.size,
                mtime=obj.last_modified,
            )
        )
    db.commit()
Example no. 2
0
def collect_buckets(account: AWSAccount):
    """Record every S3 bucket of *account* in the database.

    Lists the account's buckets via the S3 API and inserts a Bucket row
    (account, name, ctime) for each, committing once at the end. Unless
    --aws-s3-skip-checks is set, buckets that already have a row are
    skipped. Returns the query of all Bucket rows for this account.
    """
    aws = aws_session(account.id, account.role)
    s3_client = aws.client("s3")
    db = Session()
    listing = s3_client.list_buckets().get("Buckets", [])

    log.info(f"Collecting all buckets in {account.rtdname}")

    for entry in listing:
        name = entry.get("Name")
        created = entry.get("CreationDate")

        if not ArgumentParser.args.aws_s3_skip_checks:
            # Skip buckets already recorded for this account.
            existing = (
                db.query(Bucket).filter_by(account=account.id, name=name).scalar()
            )
            if existing is not None:
                continue

        row = Bucket(account=account.id, name=name, ctime=created)
        log.info(f"Found bucket {name} in {account.rtdname}")
        db.add(row)
    db.commit()

    return db.query(Bucket).filter_by(account=account.id)
Example no. 3
0
def _select_delete_objects(version_list, mtime):
    """Return [{"VersionId": ..., "Key": ...}] for versions/delete-markers passing the filters.

    A version is selected unless it is newer than *mtime* (when given) or its
    key does not match --aws-s3-pattern (when given).
    """
    delete_objects = []
    versions = version_list.get("Versions", [])
    versions.extend(version_list.get("DeleteMarkers", []))
    for v in versions:
        object_version = v["VersionId"]
        object_key = v["Key"]
        # object_size = v["Size"]
        object_mtime = make_valid_timestamp(v["LastModified"])

        if mtime and object_mtime > mtime:
            log.debug(
                f"Object {object_key} with mtime {object_mtime} newer than mtime {mtime}"
            )
            continue
        if (
            ArgumentParser.args.aws_s3_pattern
            and bool(re.search(ArgumentParser.args.aws_s3_pattern, str(object_key)))
            is False
        ):
            log.debug(
                f"Object {object_key} does not match {ArgumentParser.args.aws_s3_pattern}"
            )
            continue
        log.info(
            (
                f"Object {object_key} with version {object_version} and mtime"
                f" {object_mtime} matches {ArgumentParser.args.aws_s3_pattern}"
            )
        )
        delete_objects.append({"VersionId": object_version, "Key": object_key})
    return delete_objects


def _confirm_and_delete(client, bucket, delete_objects):
    """Ask for confirmation (unless --aws-s3-yes) and delete the given versions.

    Exits the process on "Abort"; delete errors are logged, not raised.
    """
    try:
        if len(delete_objects) > 0:
            str_delete_objects = "\n".join([do["Key"] for do in delete_objects])
            if ArgumentParser.args.aws_s3_yes is True:
                confirm_delete = True
            else:
                confirm_delete = button_dialog(
                    title=f"Delete {len(delete_objects)} S3 objects?",
                    text=f"Really delete these objects?\n{str_delete_objects}",
                    buttons=[("Yes", True), ("No", False), ("Abort", None)],
                ).run()

            # SystemExit is not an Exception, so the except below won't swallow it.
            if confirm_delete is None:
                sys.exit(0)
            elif confirm_delete is True:
                response = client.delete_objects(
                    Bucket=bucket, Delete={"Objects": delete_objects}
                )
                log.info(f"Delete response {response}")
    except Exception:
        log.exception("Something went wrong trying to delete")


def main():
    """Interactively delete S3 object versions matching the CLI filters.

    Resolves the target account (exactly one is supported), then pages
    through list_object_versions for the configured bucket/prefix, selects
    versions older than --aws-s3-mtime and matching --aws-s3-pattern, and
    deletes each page's selection after confirmation (or unconditionally
    with --aws-s3-yes). Exits with status 1 on multiple accounts and 0 on
    an explicit "Abort".
    """
    if ArgumentParser.args.aws_role and ArgumentParser.args.aws_account:
        accounts = [
            AWSAccount(aws_account_id, {}, role=ArgumentParser.args.aws_role)
            for aws_account_id in ArgumentParser.args.aws_account
        ]
    else:
        accounts = [AWSAccount(current_account_id(), {})]

    if len(accounts) != 1:
        log.error("This tool only supports a single account at a time")
        sys.exit(1)

    account = accounts[0]
    session = aws_session(account.id, account.role)
    client = session.client("s3")
    bucket = ArgumentParser.args.aws_s3_bucket
    prefix = ArgumentParser.args.aws_s3_prefix

    mtime = (
        make_valid_timestamp(datetime.fromisoformat(ArgumentParser.args.aws_s3_mtime))
        if ArgumentParser.args.aws_s3_mtime
        else None
    )

    is_truncated = True
    max_keys = 500
    key_marker = None
    version_id_marker = None

    while is_truncated is True:
        # Only pass pagination markers that are set; VersionIdMarker is only
        # valid together with KeyMarker.
        kwargs = {"Bucket": bucket, "MaxKeys": max_keys, "Prefix": prefix}
        if key_marker:
            kwargs["KeyMarker"] = key_marker
            if version_id_marker:
                kwargs["VersionIdMarker"] = version_id_marker
        version_list = client.list_object_versions(**kwargs)

        is_truncated = version_list.get("IsTruncated", False)
        key_marker = version_list.get("NextKeyMarker")
        version_id_marker = version_list.get("NextVersionIdMarker")

        delete_objects = _select_delete_objects(version_list, mtime)
        _confirm_and_delete(client, bucket, delete_objects)