def test__update_content__log_location_and_subnet__fields_set(self):
    "AsiaqDataPipeline.update_content with log location and subnet ID"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    # Two objects: the 'Default' one (with a pre-existing field) and another.
    contents = [
        {'id': 'Default', 'name': 'short',
         'fields': [{'key': 'uninteresting', 'stringValue': 'thing'}]},
        {'id': 'Other', 'name': 'unused', 'fields': []},
    ]
    dp.update_content(log_location='FAKEFAKE', subnet_id='EVENFAKER', contents=contents)
    # Both fields should have been injected into the first (Default) object.
    default_fields = dp._objects[0]['fields']
    self.assertIn({'key': 'pipelineLogUri', 'stringValue': 'FAKEFAKE'}, default_fields)
    self.assertIn({'key': 'subnetId', 'stringValue': 'EVENFAKER'}, default_fields)
def test__description_only_object__content_and_persisted_false(self):
    "AsiaqDataPipeline construction with only required args"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    # Constructor stores name/description verbatim; everything optional is empty.
    self.assertEqual("asdf", dp._name)
    self.assertEqual("qwerty", dp._description)
    self.assertFalse(dp._tags)
    self.assertFalse(dp.is_persisted())
    self.assertFalse(dp.has_content())
def test__update_content__no_values__content_updated(self):
    "AsiaqDataPipeline.update_content with no parameter values"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    objects, params = Mock(), Mock()
    dp.update_content(objects, params)
    # Objects and param definitions are stored by reference; values stay unset.
    self.assertIs(objects, dp._objects)
    self.assertIs(params, dp._params)
    self.assertIsNone(dp._param_values)
def test__update_content__new_and_old_values__values_updated(self):
    "AsiaqDataPipeline.update_content overwrites parameter values when appropriate"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty",
                           param_values={'this': 'will', 'be': 'overwritten'})
    dp.update_content(Mock(), Mock(), {'foo': 'bar', 'baz': '1'})
    # The new values replace the originals, converted to the AWS list-of-dicts form.
    expected = [
        {'id': 'foo', 'stringValue': 'bar'},
        {'id': 'baz', 'stringValue': '1'},
    ]
    self.assertEqual(expected, dp._param_values)
def test__update_content__log_location_and_subnet__fields_set(self):
    "AsiaqDataPipeline.update_content with log location and subnet ID"
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty")
    default_obj = {'id': 'Default', 'name': 'short',
                   'fields': [{'key': 'uninteresting', 'stringValue': 'thing'}]}
    other_obj = {'id': 'Other', 'name': 'unused', 'fields': []}
    pipeline.update_content(log_location='FAKEFAKE', subnet_id='EVENFAKER',
                            contents=[default_obj, other_obj])
    # Log URI and subnet fields land on the first object's field list.
    for injected in ({'key': 'pipelineLogUri', 'stringValue': 'FAKEFAKE'},
                     {'key': 'subnetId', 'stringValue': 'EVENFAKER'}):
        self.assertIn(injected, pipeline._objects[0]['fields'])
def test__update_content__old_values_new_empty__values_cleared(self):
    "AsiaqDataPipeline.update_content clears parameter values when an empty list is passed"
    # NOTE: the previous docstring was copy-pasted from the "values unchanged" test
    # and described the opposite behavior; this test verifies the values ARE cleared.
    orig_values = {'this': 'will', 'be': 'overwritten'}
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty", param_values=orig_values)
    pipeline.update_content(Mock(), Mock(), [])
    # An explicit empty list replaces the previously-set values.
    self.assertEqual([], pipeline._param_values)
def test__update_content__old_values_not_new_ones__values_unchanged(self):
    "AsiaqDataPipeline.update_content does not overwrite parameter values when not appropriate"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty",
                           param_values={'this': 'will not', 'be': 'overwritten'})
    dp.update_content(Mock(), Mock())
    # With no new values supplied, the constructor-provided ones survive.
    self.assertEqual(
        [{'id': 'this', 'stringValue': 'will not'},
         {'id': 'be', 'stringValue': 'overwritten'}],
        dp._param_values)
def test__update_content__old_values_new_empty__values_cleared(self):
    "AsiaqDataPipeline.update_content clears parameter values when an empty list is passed"
    # Docstring fixed: the old one claimed values are NOT overwritten, but this
    # test asserts that passing [] clears the previously-set parameter values.
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty",
                                 param_values={'this': 'will', 'be': 'overwritten'})
    pipeline.update_content(Mock(), Mock(), [])
    self.assertEqual([], pipeline._param_values)
def test__update_content__dict_values__content_updated(self):
    "AsiaqDataPipeline.update_content with silly dictionary parameter values"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    objects = Mock()
    params = Mock()
    dp.update_content(objects, params, {'foo': 'bar', 'baz': '1'})
    self.assertIs(dp._objects, objects)
    self.assertIs(dp._params, params)
    # Dict input is normalized into a list of {'id', 'stringValue'} records.
    self.assertIs(list, type(dp._param_values))
    self.assertEqual(2, len(dp._param_values))
    for record in ({'id': 'foo', 'stringValue': 'bar'},
                   {'id': 'baz', 'stringValue': '1'}):
        self.assertIn(record, dp._param_values)
    # Round-trip back to a dict matches the original input.
    self.assertEqual({"foo": "bar", "baz": "1"}, dp.get_param_value_dict())
def test__get_tag_dict__tags_list_passed__correct_return(self):
    "AsiaqDataPipeline.get_tag_dict with tags passed as list"
    tag_list = [
        {'key': 'template', 'value': 'silly'},
        {'key': 'another', 'value': 'tag'},
    ]
    dp = AsiaqDataPipeline(name="asdf", description="qwerty", tags=tag_list)
    # List-of-records form is converted to a plain key->value dict.
    self.assertEqual({"template": "silly", "another": "tag"}, dp.get_tag_dict())
def test__update_content__list_values__content_updated(self):
    "AsiaqDataPipeline.update_content with silly listed parameter values"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    objects, params = Mock(), Mock()
    value_records = [
        {'id': 'foo', 'stringValue': 'bar'},
        {'id': 'bar', 'stringValue': 'baz'},
        {'id': 'simple', 'stringValue': 'string'},
    ]
    dp.update_content(objects, params, value_records)
    # Already-listed values are stored as given and round-trip to a dict.
    self.assertIs(dp._objects, objects)
    self.assertIs(dp._params, params)
    self.assertEqual(3, len(dp._param_values))
    self.assertEqual({"foo": "bar", "bar": "baz", "simple": "string"},
                     dp.get_param_value_dict())
def test__update_content__old_values_not_new_ones__values_unchanged(self):
    "AsiaqDataPipeline.update_content does not overwrite parameter values when not appropriate"
    original = {'this': 'will not', 'be': 'overwritten'}
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty", param_values=original)
    pipeline.update_content(Mock(), Mock())
    # Omitting the values argument leaves the existing ones in place.
    expected = [
        {'id': 'this', 'stringValue': 'will not'},
        {'id': 'be', 'stringValue': 'overwritten'},
    ]
    self.assertEqual(expected, pipeline._param_values)
def test__from_template__log_and_subnet_fields__fields_set(self):
    "AsiaqDataPipeline.from_template with a log location and subnet ID"
    pipeline = AsiaqDataPipeline.from_template(
        name="asdf", description="qwerty", template_name="dynamodb_backup",
        log_location="FAKEY", subnet_id="McFAKEFAKE")
    self.assertFalse(pipeline._tags)
    self.assertFalse(pipeline.is_persisted())
    self.assertTrue(pipeline.has_content())
    # Find the 'Default' pipeline object; the old helper silently returned None
    # when it was missing, which crashed with a TypeError on subscript below.
    # Guard with an explicit assertion for a clearer failure.
    default_object = next((obj for obj in pipeline._objects if obj['id'] == 'Default'), None)
    self.assertIsNotNone(default_object)
    self.assertIn({'key': 'pipelineLogUri', 'stringValue': 'FAKEY'}, default_object['fields'])
    self.assertIn({'key': 'subnetId', 'stringValue': 'McFAKEFAKE'}, default_object['fields'])
def test__from_template__backup_period_value(self):
    "AsiaqDataPipeline.from_template test if from_template contains myDDBSchedulePeriod."
    pipeline = AsiaqDataPipeline.from_template(
        name="asdf", description="qwerty", template_name="dynamodb_backup")
    # The first object's first field is expected to reference the schedule-period
    # parameter by AWS expression syntax.
    first_field = pipeline._objects[0]['fields'][0]
    self.assertEqual("#{myDDBSchedulePeriod}", first_field['stringValue'])
def test__update_content__new_and_old_values__values_updated(self):
    "AsiaqDataPipeline.update_content overwrites parameter values when appropriate"
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty",
                                 param_values={'this': 'will', 'be': 'overwritten'})
    replacement = {'foo': 'bar', 'baz': '1'}
    pipeline.update_content(Mock(), Mock(), replacement)
    # Replacement values fully displace the constructor-supplied ones.
    self.assertEqual(
        [{'id': 'foo', 'stringValue': 'bar'},
         {'id': 'baz', 'stringValue': '1'}],
        pipeline._param_values)
def test__pipeline_state__field_set__value_found(self):
    "AsiaqDataPipeline.pipeline_state is found if set"
    metadata = [{'key': '@pipelineState', 'stringValue': 'NIFTY'}]
    dp = AsiaqDataPipeline("TEST", "TESTY", metadata=metadata)
    # The property reads the '@pipelineState' metadata field.
    self.assertEqual('NIFTY', dp.pipeline_state)
def test__update_content__bad_args__error(self):
    "AsiaqDataPipeline.update_content with bad argument combinations fails"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    # Calling with no content source at all is a programmer error...
    with self.assertRaises(asiaq_exceptions.ProgrammerError):
        dp.update_content()
    # ...and so is supplying both a template and literal contents.
    with self.assertRaises(asiaq_exceptions.ProgrammerError):
        dp.update_content(template_name="something", contents="something else")
def test__health__no_field__none_returned(self):
    "AsiaqDataPipeline.health is none if the field is absent"
    metadata = [{'key': '@foo', 'stringValue': 'bar'}]
    # Metadata exists but has no '@healthStatus' entry, so health is None.
    self.assertIsNone(AsiaqDataPipeline("TEST", "TESTY", metadata=metadata).health)
def test__health__field_set__value_found(self):
    "AsiaqDataPipeline.health is found if set"
    metadata = [{'key': '@healthStatus', 'stringValue': 'SUPERHEALTHY'}]
    dp = AsiaqDataPipeline("TEST", "TESTY", metadata=metadata)
    # The property surfaces the '@healthStatus' metadata value.
    self.assertEqual('SUPERHEALTHY', dp.health)
def test__last_run__no_field__none_returned(self):
    "AsiaqDataPipeline.last_run is None when metadata does not include last-run"
    metadata = [{'key': '@foo', 'stringValue': 'bar'}]
    # Metadata present, but without a last-run field: property yields None.
    self.assertIsNone(AsiaqDataPipeline("TEST", "TESTY", metadata=metadata).last_run)
def test__get_tag_dict__malformed_tag__exception(self):
    "AsiaqDataPipeline.get_tag_dict with an invalid tag definition"
    # Tag record uses 'stringValue' instead of the expected 'value' key.
    bad_tags = [{'key': 'template', 'stringValue': 'conflict'}]
    dp = AsiaqDataPipeline(name="asdf", description="qwerty", tags=bad_tags)
    self.assertRaises(asiaq_exceptions.DataPipelineFormatException, dp.get_tag_dict)
def test__create_date__field_absent__exception(self):
    "AsiaqDataPipeline.create_date causes an exception if not set"
    metadata = [{'key': '@foo', 'stringValue': 'bar'}]
    dp = AsiaqDataPipeline("TEST", "TESTY", metadata=metadata)
    # No creation-time field in metadata: accessing the property raises KeyError.
    with self.assertRaises(KeyError):
        _ = dp.create_date
def test__from_template__template_ok__reasonable(self):
    "AsiaqDataPipeline.from_template with a valid template"
    pipeline = AsiaqDataPipeline.from_template(
        name="asdf", description="qwerty", template_name="dynamodb_backup")
    self.assertEqual("asdf", pipeline._name)
    self.assertEqual("qwerty", pipeline._description)
    self.assertFalse(pipeline._tags)
    self.assertFalse(pipeline.is_persisted())
    self.assertTrue(pipeline.has_content())
    # nasty cherry-pick: spot-check one known object ID from the template
    self.assertEqual("SchedulePeriod", pipeline._objects[0]['id'])
def test__get_tag_dict__duplicate_tag__exception(self):
    "AsiaqDataPipeline.get_tag_dict with a duplicate tag definition"
    # Two records with the same 'template' key should be rejected.
    dup_tags = [
        {'key': 'template', 'value': 'silly'},
        {'key': 'template', 'value': 'conflict'},
    ]
    dp = AsiaqDataPipeline(name="asdf", description="qwerty", tags=dup_tags)
    self.assertRaises(asiaq_exceptions.DataPipelineFormatException, dp.get_tag_dict)
def test__get_param_value_dict__duplicate_value__exception(self):
    "AsiaqDataPipeline.get_param_value_dict with a duplicate value definition"
    # Two value records sharing the same 'id' should be rejected.
    dup_values = [
        {'id': 'template', 'stringValue': 'silly'},
        {'id': 'template', 'stringValue': 'conflict'},
    ]
    dp = AsiaqDataPipeline(name="asdf", description="qwerty", param_values=dup_values)
    self.assertRaises(asiaq_exceptions.DataPipelineFormatException, dp.get_param_value_dict)
def test__update_content__dict_values__content_updated(self):
    "AsiaqDataPipeline.update_content with silly dictionary parameter values"
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty")
    fake_objects = Mock()
    fake_params = Mock()
    pipeline.update_content(fake_objects, fake_params, {'foo': 'bar', 'baz': '1'})
    # Content references are stored as-is.
    self.assertIs(pipeline._objects, fake_objects)
    self.assertIs(pipeline._params, fake_params)
    # The dict of values is converted to a two-element list of records.
    self.assertIs(list, type(pipeline._param_values))
    self.assertEqual(2, len(pipeline._param_values))
    self.assertIn({'id': 'foo', 'stringValue': 'bar'}, pipeline._param_values)
    self.assertIn({'id': 'baz', 'stringValue': '1'}, pipeline._param_values)
    # And it converts back to the original dict.
    self.assertEqual({"foo": "bar", "baz": "1"}, pipeline.get_param_value_dict())
def test__update_content__list_values__content_updated(self):
    "AsiaqDataPipeline.update_content with silly listed parameter values"
    pipeline = AsiaqDataPipeline(name="asdf", description="qwerty")
    fake_objects, fake_params = Mock(), Mock()
    pipeline.update_content(fake_objects, fake_params, [
        {'id': 'foo', 'stringValue': 'bar'},
        {'id': 'bar', 'stringValue': 'baz'},
        {'id': 'simple', 'stringValue': 'string'},
    ])
    self.assertIs(pipeline._objects, fake_objects)
    self.assertIs(pipeline._params, fake_params)
    self.assertEqual(3, len(pipeline._param_values))
    # Values round-trip into dict form unchanged.
    self.assertEqual({"foo": "bar", "bar": "baz", "simple": "string"},
                     pipeline.get_param_value_dict())
def test__last_run__valid_date__datetime_returned(self):
    "AsiaqDataPipeline.last_run is a correct datetime"
    metadata = [{'key': '@latestRunTime', 'stringValue': '1978-08-05T08:00:00'}]
    when = AsiaqDataPipeline("TEST", "TESTY", metadata=metadata).last_run
    # Parsed fields match the ISO string; offset of 0 implies a UTC-aware value.
    self.assertEqual(1978, when.year)
    self.assertEqual(8, when.month)
    self.assertEqual(5, when.day)
    self.assertEqual(8, when.hour)
    self.assertEqual(0, when.utcoffset().total_seconds())
def test__create_date__field_set__datetime_found(self):
    "AsiaqDataPipeline.create_date is a correct datetime"
    metadata = [{'key': '@creationTime', 'stringValue': '2008-01-20T17:00:00'}]
    created = AsiaqDataPipeline("TEST", "TESTY", metadata=metadata).create_date
    # Parsed fields match the ISO string; offset of 0 implies a UTC-aware value.
    self.assertEqual(2008, created.year)
    self.assertEqual(1, created.month)
    self.assertEqual(20, created.day)
    self.assertEqual(17, created.hour)
    self.assertEqual(0, created.utcoffset().total_seconds())
def test__from_template__log_and_subnet_fields__fields_set(self):
    "AsiaqDataPipeline.from_template with a log location and subnet ID"
    pipeline = AsiaqDataPipeline.from_template(
        name="asdf", description="qwerty", template_name="dynamodb_backup",
        log_location="FAKEY", subnet_id="McFAKEFAKE")
    self.assertFalse(pipeline._tags)
    self.assertFalse(pipeline.is_persisted())
    self.assertTrue(pipeline.has_content())

    def _find_default(objects):
        "Return the object whose id is 'Default', or None if absent."
        for obj in objects:
            if obj['id'] == 'Default':
                return obj
        return None  # explicit: previously fell off the end implicitly

    default_object = _find_default(pipeline._objects)
    # Guard against a missing 'Default' object: without this, the subscripts
    # below raise an opaque TypeError on None instead of a clean test failure.
    self.assertIsNotNone(default_object)
    self.assertIn({'key': 'pipelineLogUri', 'stringValue': 'FAKEY'},
                  default_object['fields'])
    self.assertIn({'key': 'subnetId', 'stringValue': 'McFAKEFAKE'},
                  default_object['fields'])
def _unpersisted_pipeline(self, contents=None):
    "Return a pipeline with no AWS ID."
    # No pipeline_id is supplied, so the object should not count as persisted.
    return AsiaqDataPipeline(name="test", description="pipeline with no id",
                             contents=contents)
def test__get_tag_dict__no_tags__no_return(self):
    "AsiaqDataPipeline.get_tag_dict with no tags"
    # Without any tags, get_tag_dict yields None rather than an empty dict.
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    self.assertIsNone(dp.get_tag_dict())
def test__get_tag_dict__tags_dict_passed__correct_return(self):
    "AsiaqDataPipeline.get_tag_dict with tags passed as dict"
    tag_dict = {'template': 'silly'}
    dp = AsiaqDataPipeline(name="asdf", description="qwerty", tags=tag_dict)
    # Dict input comes back out as an equal dict.
    self.assertEqual({"template": "silly"}, dp.get_tag_dict())
def test__last_run__no_metadata__state_exception(self):
    "AsiaqDataPipeline.last_run fails appropriately when no metadata is set"
    dp = AsiaqDataPipeline("TEST", "TESTY")
    # With no metadata at all, reading last_run raises a state exception.
    with self.assertRaises(asiaq_exceptions.DataPipelineStateException):
        _ = dp.last_run
def test__update_content__template__content_updated(self):
    "AsiaqDataPipeline.update_content with a template"
    dp = AsiaqDataPipeline(name="asdf", description="qwerty")
    dp.update_content(template_name="dynamodb_restore")
    self.assertTrue(dp.has_content())
    # Spot-check a known object ID from the restore template.
    self.assertEqual("DDBDestinationTable", dp._objects[1]['id'])
def test__get_tag_dict__tags_list_passed__correct_return(self):
    "AsiaqDataPipeline.get_tag_dict with tags passed as list"
    pipeline = AsiaqDataPipeline(
        name="asdf", description="qwerty",
        tags=[
            {'key': 'template', 'value': 'silly'},
            {'key': 'another', 'value': 'tag'},
        ])
    # Each {'key', 'value'} record becomes one dict entry.
    expected = {"template": "silly", "another": "tag"}
    self.assertEqual(expected, pipeline.get_tag_dict())
def _persisted_pipeline(self, contents=None):
    "Return a pipeline with a set AWS ID, so that it appears to be 'saved' to AWS already."
    # Docstring typo fixed ("apppears" -> "appears"). Supplying pipeline_id makes
    # the object look persisted to AWS.
    return AsiaqDataPipeline("test", "pipeline with id", pipeline_id="asdf",
                             contents=contents)