def test_lite_bucket_schema_for_events(historical_table, bucket_event):
    """Verify the "lite" report schema after replaying a DynamoDB stream event.

    Builds a report from the current table state, clears its bucket list,
    processes one stream record into it, and re-dumps it, asserting that the
    configured ``exclude_fields`` never appear in any bucket entry.

    Fix: the restoration of ``CONFIG.exclude_fields`` now happens in a
    ``finally`` block — previously a single failing assertion would leak the
    mutated global config into every subsequent test.
    """
    old_fields = CONFIG.exclude_fields
    CONFIG.exclude_fields = (
        "Name,_version,Grants,LifecycleRules,Logging,Policy,Tags,Versioning,Website,Cors,"
        "Notifications,Acceleration,Replication,CreationDate,AnalyticsConfigurations,"
        "MetricsConfigurations,InventoryConfigurations".split(",")
    )

    try:
        all_buckets = CurrentS3Model.scan()
        generated_report = S3ReportSchema(strict=True).dump({"all_buckets": all_buckets}).data

        # Blank out the bucket list, then replay a single stream record into
        # the report so it picks up the new bucket from the event:
        generated_report["all_buckets"] = []
        process_dynamodb_record(bucket_event["Records"][0], generated_report)

        lite_report = S3ReportSchema(strict=True).dump(generated_report).data

        assert lite_report["generated_date"]
        assert lite_report["s3_report_version"] == CONFIG.s3_reports_version
        assert not lite_report.get("all_buckets")

        # The replayed event adds one bucket on top of the fixture's 10:
        assert lite_report["buckets"]["testbucketNEWBUCKET"]
        assert len(lite_report["buckets"]) == 11

        for bucket in lite_report["buckets"].values():
            keys = bucket.keys()
            for excluded in CONFIG.exclude_fields:
                assert excluded not in keys

            assert bucket["AccountId"] == "123456789012"
            assert bucket["Region"] == "us-east-1"
    finally:
        # Clean-up — always restore the global config, even on failure:
        CONFIG.exclude_fields = old_fields
def test_light_bucket_schema(historical_table):
    """Verify the "light" report schema honors ``CONFIG.exclude_fields``.

    Dumps the full table through ``S3ReportSchema`` with a long exclusion
    list configured and asserts that none of the excluded fields appear in
    any bucket entry.

    Fix: the restoration of ``CONFIG.exclude_fields`` now happens in a
    ``finally`` block — previously a single failing assertion would leak the
    mutated global config into every subsequent test.
    """
    old_fields = CONFIG.exclude_fields
    CONFIG.exclude_fields = (
        "Name,_version,Grants,LifecycleRules,Logging,Policy,Tags,Versioning,Website,Cors,"
        "Notifications,Acceleration,Replication,CreationDate,AnalyticsConfigurations,"
        "MetricsConfigurations,InventoryConfigurations".split(",")
    )

    try:
        all_buckets = CurrentS3Model.scan()
        generated_file = S3ReportSchema(strict=True).dump({"all_buckets": all_buckets}).data

        assert generated_file["generated_date"]
        assert generated_file["s3_report_version"] == CONFIG.s3_reports_version
        assert len(generated_file["buckets"]) == 10
        assert not generated_file.get("all_buckets")

        for bucket in generated_file["buckets"].values():
            keys = bucket.keys()
            for excluded in CONFIG.exclude_fields:
                assert excluded not in keys

            assert bucket["AccountId"] == "123456789012"
            assert bucket["Region"] == "us-east-1"
    finally:
        # Clean-up — always restore the global config, even on failure:
        CONFIG.exclude_fields = old_fields
def test_bucket_schema(historical_table):
    """Verify the full (non-lite) report schema output.

    Every bucket entry carries the expected account/region, its Tags include
    the bucket name, and internal fields (``_version``, ``Name``) are absent.
    """
    report = S3ReportSchema(strict=True).dump({"all_buckets": CurrentS3Model.scan()}).data

    assert report["generated_date"]
    assert report["s3_report_version"] == CONFIG.s3_reports_version
    assert not report.get("all_buckets")

    for bucket_name, details in report["buckets"].items():
        assert details["AccountId"] == "123456789012"
        assert details["Region"] == "us-east-1"
        assert details["Tags"]["theBucketName"] == bucket_name
        assert not details.get("_version")
        assert not details.get("Name")
def dump_report(commit=True):
    """Scan the current S3 table, serialize it via ``S3ReportSchema``, and
    optionally persist the resulting JSON report to S3.

    :param commit: when True, upload the report to S3; when False, only log
        that the save was skipped.
    """
    # Pull everything from DynamoDB and serialize it:
    log.debug("Starting... Beginning scan.")
    report = S3ReportSchema(strict=True).dump({"all_buckets": CurrentS3Model.scan()}).data

    if not commit:
        log.debug("Commit flag not set, not saving.")
    else:
        log.debug("Saving to S3.")
        # Pynamo/Dynamo can't store empty strings, so "<empty>" placeholders
        # are swapped back to "" before upload:
        serialized = json.dumps(report, indent=4).replace("\"<empty>\"", "\"\"")
        dump_to_s3(serialized.encode("utf-8"))

    log.debug("Completed S3 report generation.")
def generated_file(historical_table):
    """Return the serialized S3 report (JSON string) as UTF-8 bytes."""
    scan_results = CurrentS3Model.scan()
    report_json = S3ReportSchema(strict=True).dumps({"all_buckets": scan_results}).data
    return report_json.encode("utf-8")