def test_one_to_many_fields_not_supported(self, migration_apps):
    """A one-to-many field in the yaml makes the loader raise NotImplementedError."""
    yaml_content = """
- model: datahub.core.test.support.book
  pk: 1
  fields:
    name: name
    published_on: '2010-01-01'
- model: datahub.core.test.support.person
  pk: 1
  fields:
    first_name: A
    last_name: Person
    proofread_books:
      - 1
"""
    yaml_open = mock.mock_open(read_data=yaml_content)
    # open() is patched inside migration_utils so no real file is touched
    with pytest.raises(NotImplementedError) as excinfo, \
            mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')
    assert str(excinfo.value) == 'One-to-many fields not supported'
def test_invalid_file_raises_exception(self, migration_apps):
    """A path to a non-existent file makes the loader raise FileNotFoundError."""
    with pytest.raises(FileNotFoundError):
        load_yaml_data_in_migration(migration_apps, 'invalid-path-to-file.yaml')
def test_many_to_many_fields_not_supported(self, migration_apps):
    """A many-to-many field in the yaml makes the loader raise NotImplementedError."""
    yaml_content = """
- model: datahub.core.test.support.book
  pk: 1
  fields:
    name: name
    authors:
      - 1
      - 2
    published_on: '2010-01-01'
"""
    yaml_open = mock.mock_open(read_data=yaml_content)
    # create the referenced authors so the failure is about the field type,
    # not missing related rows
    PersonFactory.create_batch(2, pk=factory.Iterator([1, 2]))
    with pytest.raises(NotImplementedError) as excinfo, \
            mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')
    assert str(excinfo.value) == 'Many-to-many fields not supported'
def test_exception_rollsback_changes(self, migration_apps):
    """An error while processing the file rolls back every change already made."""
    # second item references a non-existent model field, triggering the failure
    yaml_content = """
- model: datahub.core.test.support.person
  pk: 1
  fields:
    first_name: Person
    last_name: 1
- model: datahub.core.test.support.person
  pk: 3
  fields:
    invalid_field: error
- model: datahub.core.test.support.person
  pk: 3
  fields:
    first_name: Person
    last_name: 3
"""
    yaml_open = mock.mock_open(read_data=yaml_content)
    assert not Person.objects.count()
    with pytest.raises(FieldDoesNotExist), \
            mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')
    # the first (valid) person must not have been committed either
    assert not Person.objects.count()
def test_pk_required(self, migration_apps):
    """An item without a pk field makes the loader raise AssertionError."""
    yaml_content = """
- model: datahub.core.test.support.person
  fields:
    first_name: Existing
"""
    yaml_open = mock.mock_open(read_data=yaml_content)
    with pytest.raises(AssertionError) as excinfo, \
            mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')
    assert str(excinfo.value) == 'pk field required'
def test_invalid_field_raises_exception(self, migration_apps):
    """A field value that cannot be deserialised raises DeserializationError."""
    yaml_content = """
- model: datahub.core.test.support.book
  pk: 1
  fields:
    name: Book name
    published_on: 'invalid'
"""
    yaml_open = mock.mock_open(read_data=yaml_content)
    with pytest.raises(base.DeserializationError) as excinfo, \
            mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')
    assert str(excinfo.value) == (
        "[\"'invalid' value has an invalid date format. It must be in YYYY-MM-DD format.\"]: "
        "(book:pk=1) field_value was 'invalid'"
    )
def test_invalid_fk_raises_exception(self, migration_apps):
    """A foreign-key value that cannot be deserialised raises DeserializationError."""
    yaml_content = """
- model: datahub.core.test.support.book
  pk: 1
  fields:
    name: Book name
    proofreader: 'invalid'
    published_on: '2010-01-01'
"""
    yaml_open = mock.mock_open(read_data=yaml_content)
    with pytest.raises(base.DeserializationError) as excinfo, \
            mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')
    assert str(excinfo.value) == (
        "['“invalid” value must be an integer.']: (book:pk=1) field_value was 'invalid'"
    )
def load_one_list_tiers(apps, schema_editor):
    """Load the One List tier fixtures shipped alongside this migration."""
    fixture = PurePath(__file__).parent / '0001_one_list_tiers.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_initial_classifications(apps, schema_editor):
    """Load the initial company classification fixtures."""
    fixture = PurePath(__file__).parent / '0010_company_classifications.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_initial_ts_and_cs(apps, schema_editor):
    """Load the initial terms-and-conditions fixture from the app's fixtures dir."""
    fixture = PurePath(__file__).parents[1] / 'fixtures/initial_terms_and_conditions.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_service_questions_and_answers(apps, schema_editor):
    """Load the updated service questions and answers fixtures."""
    fixture = PurePath(__file__).parent / '0074_update_service_questions_and_answers.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_referral_source_activities(apps, _):
    """Load the updated referral source activity fixtures."""
    fixture = PurePath(__file__).parent / '0020_update_referral_source_activities.yaml'
    load_yaml_data_in_migration(apps, fixture)
def update_specific_programmes(apps, schema_editor):
    """Load the updated specific programme fixtures."""
    fixture = PurePath(__file__).parent / '0005_update_specific_programmes.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_administrative_areas(apps, schema_editor):
    """Load the administrative area fixtures."""
    fixture = PurePath(__file__).parent / '0022_add_administrative_area.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_gva_multiplier(apps, schema_editor):
    """Load the GVA multiplier fixtures."""
    fixture = PurePath(__file__).parent / '0060_gvamultiplier.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_fixtures(apps, schema_editor):
    """Load every fixture listed in the module-level ``fixtures`` sequence."""
    # ``fixtures`` is defined at module level — presumably a list of yaml paths
    for fixture_path in fixtures:
        load_yaml_data_in_migration(apps, fixture_path)
def load_services(apps, _):
    """Load the updated service fixtures."""
    fixture = PurePath(__file__).parent / '0017_update_services.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_exchange_rate(apps, schema_editor):
    """Load the USD exchange rate fixture."""
    fixture = PurePath(__file__).parent / '0018_add_usd_exchange_rate.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_sectors(apps, schema_editor):
    """Load the sector hierarchy fixtures."""
    fixture = PurePath(__file__).parent / '0002_add_sector_hierarchy.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_investment_spi_services(apps, schema_editor):
    """Load the investment SPI 'aftercare offered' service fixture."""
    fixture = PurePath(__file__).parent / '0012_investment_spi_service_aftercare_offered.yaml'
    load_yaml_data_in_migration(apps, fixture)
def inner(apps, schema_editor):
    """Load the yaml file named by the enclosing scope's ``yaml_file_name``."""
    fixture = PurePath(__file__).parent / yaml_file_name
    load_yaml_data_in_migration(apps, fixture)
def load_activity_types(apps, schema_editor):
    """Load the activity type fixtures."""
    fixture = PurePath(__file__).parent / '0056_activity_type.yaml'
    load_yaml_data_in_migration(apps, fixture)
def test_loading(self, migration_apps):
    """Loading a yaml file updates existing rows and creates missing ones."""
    yaml_content = """
# person with pk=1, last_name should change
- model: datahub.core.test.support.person
  pk: 1
  fields:
    first_name: Existing
    last_name: Person with changed surname
# person with pk=3, first_name should change, last_name shouldn't change
- model: datahub.core.test.support.person
  pk: 3
  fields:
    first_name: Another existing
# person with pk=10, a new record should be created
- model: datahub.core.test.support.person
  pk: 10
  fields:
    first_name: New
    last_name: Person
# book with pk=1, fk to person (proofreader) should change
- model: datahub.core.test.support.book
  pk: 1
  fields:
    name: Book name
    proofreader: 3
    published_on: '2010-01-01'
"""
    yaml_open = mock.mock_open(read_data=yaml_content)

    # pre-existing data the yaml should update
    people = PersonFactory.create_batch(
        3,
        pk=factory.Iterator([1, 2, 3]),
        first_name='Existing',
        last_name='Person',
    )
    BookFactory(
        pk=1,
        name='Previous book name',
        proofreader=people[0],
        published_on=datetime.date(2010, 1, 1),
        authors=[],
    )

    with mock.patch('datahub.core.migration_utils.open', yaml_open, create=True):
        load_yaml_data_in_migration(migration_apps, 'path-to-file.yaml')

    person_rows = Person.objects.order_by('id').values('id', 'first_name', 'last_name')
    assert list(person_rows) == [
        {'id': 1, 'first_name': 'Existing', 'last_name': 'Person with changed surname'},
        {'id': 2, 'first_name': 'Existing', 'last_name': 'Person'},
        {'id': 3, 'first_name': 'Another existing', 'last_name': 'Person'},
        {'id': 10, 'first_name': 'New', 'last_name': 'Person'},
    ]

    book_rows = Book.objects.order_by('id').values('id', 'name', 'proofreader', 'published_on')
    assert list(book_rows) == [
        {
            'id': 1,
            'name': 'Book name',
            'proofreader': 3,
            'published_on': datetime.date(2010, 1, 1),
        },
    ]
def load_project_manager_request_status_data(apps, schema_editor):
    """Load the project manager request status fixtures."""
    fixture = PurePath(__file__).parent / '0055_project_manager_request_status.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_delivery_partners(apps, schema_editor):
    """Load the additional delivery partner fixtures."""
    fixture = PurePath(__file__).parent / '0039_additional_delivery_partners.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_initial_overseas_regions(apps, schema_editor):
    """Load the initial overseas region fixtures."""
    fixture = PurePath(__file__).parent / '0013_initial_overseas_region.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_metadata(apps, schema_editor):
    """Load every metadata fixture listed in ``metadata_files`` from 0001_initial/."""
    base = PurePath(__file__).parent / '0001_initial'
    for file_name in metadata_files:
        load_yaml_data_in_migration(apps, base / file_name)
def load_initial_inner_template(apps, schema_editor):
    """Load the initial notification inner template fixture."""
    fixture = PurePath(__file__).parent / '0002_notificationinnertemplate.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_services(apps, schema_editor):
    """Load the updated service fixtures."""
    fixture = PurePath(__file__).parent / '0026_update_services.yaml'
    load_yaml_data_in_migration(apps, fixture)
def load_initial_countries(apps, schema_editor):
    """Load the initial country fixtures."""
    fixture = PurePath(__file__).parent / '0001_initial_countries.yaml'
    load_yaml_data_in_migration(apps, fixture)