def setup_and_teardown(xml_bucket: str, json_bucket: str):
    """
    Anything before the yield is executed before the test;
    anything after the yield is executed after the test.
    """
    logging.info("Setting up: create s3 and dynamodb resources")
    s3.create_bucket(xml_bucket)
    s3.create_bucket(json_bucket)
    with open("test/mock_data/data.xml", 'rb') as f:
        data = f.read().decode('utf-8')
    s3.put_object(xml_bucket, "data.xml", data)

    # dynamo create table
    db.create_rules_table("TestProcessingRules")

    # dynamo load data
    with open("test/mock_data/rules.json", 'rb') as f:
        fake_rules = json.load(f)
    db.load_rules(fake_rules, "TestProcessingRules")

    yield

    logging.info("Tearing down: delete s3 and dynamodb resources")
    s3.delete_bucket(xml_bucket)
    s3.delete_bucket(json_bucket)

    # dynamo remove table
    db.delete_table("TestProcessingRules")
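The setup/teardown generator above follows the pytest yield-fixture pattern. A minimal sketch of how it could be registered and wired up, assuming pytest is in use; the `xml_bucket`, `json_bucket`, and `processing_resources` fixture names and the bucket names are illustrative assumptions, not part of the original code:

import pytest

# Hypothetical bucket-name fixtures; the real names may differ.
@pytest.fixture
def xml_bucket() -> str:
    return "test-xml-bucket"

@pytest.fixture
def json_bucket() -> str:
    return "test-json-bucket"

@pytest.fixture
def processing_resources(xml_bucket, json_bucket):
    # Delegate to the generator: everything before its yield runs as setup,
    # everything after it runs as teardown.
    yield from setup_and_teardown(xml_bucket, json_bucket)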
def process_event(event: dict, to_bucket: str, rules: dict) -> None:
    """
    Process a received event, retrieve its metadata, download the xml,
    convert the xml file to json and upload it to s3.
    No core logic here; the s3api and converter modules are unit tested.
    """
    event_name = event["eventName"]
    from_bucket = event["s3"]["bucket"]["name"]
    file_prefix = event["s3"]["object"]["key"].split(".xml")[0]
    logging.info(
        f"Received an event: Name: {event_name}, Bucket: {from_bucket}, File: {file_prefix}"
    )

    xml_string = s3.get_object_as_string(from_bucket, f"{file_prefix}.xml")
    if not xml_string:
        raise IOError

    json_string = converter.xml_to_json(xml_string, rules)

    success = s3.put_object(to_bucket, f"{file_prefix}.json", json_string)
    if not success:
        raise IOError

    logging.info(f"Saved ** {file_prefix}.json ** to {to_bucket} S3 bucket")
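Because process_event only reads `eventName`, `s3.bucket.name`, and `s3.object.key`, a test can drive it with a hand-built record. A minimal sketch, assuming the mock buckets created by the fixture above and a rules dict loaded the same way the fixture loads test/mock_data/rules.json; the bucket and key names here are illustrative only:

# Hypothetical S3-notification-style record; only the fields that
# process_event actually reads are populated.
fake_event = {
    "eventName": "ObjectCreated:Put",
    "s3": {
        "bucket": {"name": "test-xml-bucket"},
        "object": {"key": "data.xml"},
    },
}

# Assumes the mock buckets and the rules dict from the fixture are in place.
process_event(fake_event, to_bucket="test-json-bucket", rules=fake_rules)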
def test_get_object(prepare_bucket, bucket: str):
    obj = "test-obj.txt"
    test_content = "test body"
    s3.put_object(bucket, obj, test_content)
    string_obj = s3.get_object_as_string(bucket, obj)
    assert test_content == string_obj
def test_put_object(prepare_bucket, bucket: str):
    obj = "test-put-obj.txt"
    s3.put_object(bucket, obj, "test body")
    objects = [some_object['Key'] for some_object in s3.list_objects(bucket)]
    assert obj in objects
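Both tests above consume `prepare_bucket` and `bucket` fixtures that are defined elsewhere. A minimal sketch of what they might look like, assuming pytest and the same `s3` wrapper module used by the tests; the fixture bodies and the bucket name are assumptions for illustration:

import pytest

# Illustrative only: the real fixtures may be defined differently.
@pytest.fixture
def bucket() -> str:
    return "test-bucket"

@pytest.fixture
def prepare_bucket(bucket):
    # Create the bucket before the test and remove it afterwards,
    # using the same s3 wrapper module the tests call.
    s3.create_bucket(bucket)
    yield
    s3.delete_bucket(bucket)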