def handle(self, *args, **options):
    """Reload every reloadable import model, then run integration tasks.

    Scans all attributes of the ``models`` module and, for each object
    exposing a callable ``reload``, calls ``reload(session)`` with one
    shared HTTP session. Afterwards runs the country-update and
    integration tasks synchronously.
    """
    session = requests.Session()
    for obj in models.__dict__.values():
        # Duck-typed check: any model-like object with a callable reload().
        if hasattr(obj, "reload") and callable(obj.reload):
            # print() call form: the original used the Python-2-only
            # print statement, which is a SyntaxError on Python 3.
            print("Loading {} records".format(obj._meta.verbose_name))
            obj.reload(session)
    update_countries_from_imports()  # run synchronously
    integrate_imports()  # run synchronously
def test_add_and_delete_from_additionallanguage(self):
    """An AdditionalLanguage shows up in Language.names_text() after
    integrate_imports(), and disappears again once it is deleted.
    """
    additional = AdditionalLanguage.objects.create(
        ietf_tag="zzz-z-test", common_name="ZTest"
    )
    # NOTE: objects.create() already persists the row; the original's
    # redundant extra save() call has been removed.
    integrate_imports()  # run task synchronously
    data = Language.names_text().split("\n")
    # assertIn/assertNotIn give useful failure messages, unlike
    # assertTrue(x in y).
    self.assertIn("zzz-z-test\tZTest", data)
    additional.delete()
    data = Language.names_text().split("\n")
    self.assertNotIn("zzz-z-test\tZTest", data)
def test_add_and_delete_from_additionallanguage(self):
    """An AdditionalLanguage shows up in Language.names_text() after
    integrate_imports(), and disappears again once it is deleted.
    """
    additional = AdditionalLanguage.objects.create(
        ietf_tag="zzz-z-test", common_name="ZTest"
    )
    # NOTE: objects.create() already persists the row; the original's
    # redundant extra save() call has been removed.
    integrate_imports()  # run task synchronously
    data = Language.names_text().split("\n")
    # assertIn/assertNotIn give useful failure messages, unlike
    # assertTrue(x in y).
    self.assertIn("zzz-z-test\tZTest", data)
    additional.delete()
    data = Language.names_text().split("\n")
    self.assertNotIn("zzz-z-test\tZTest", data)
def setUpClass(cls):
    """Load fixture files, patch the HTTP layer, and run the whole
    import pipeline once for this test class.

    Fixture files are read through context managers so every handle is
    closed promptly (the original leaked five open file objects).
    """
    super(LanguageIntegrationTests, cls).setUpClass()
    data_dir = os.path.join(os.path.dirname(__file__),
                            "../imports/tests/data")

    def _read_fixture(name):
        # Read one fixture file and close its handle immediately.
        with open(os.path.join(data_dir, name)) as f:
            return f.read()

    wikipedia = _read_fixture("wikipedia.html")
    ethno = _read_fixture("LanguageCodes.tab")
    country = _read_fixture("CountryCodes.tab")
    sil = _read_fixture("iso_639_3.tab")
    w_country = _read_fixture("wikipedia_country.html")

    with patch("requests.Session") as mock_requests:
        # Feed each canned payload to the matching import model's
        # reload() through the mocked HTTP session.
        mock_requests.get().status_code = 200
        mock_requests.get().content = wikipedia
        WikipediaISOLanguage.reload(mock_requests)
        mock_requests.get().content = ethno
        EthnologueLanguageCode.reload(mock_requests)
        mock_requests.get().content = country
        EthnologueCountryCode.reload(mock_requests)
        mock_requests.get().content = sil
        SIL_ISO_639_3.reload(mock_requests)
        mock_requests.get().content = w_country
        WikipediaISOCountry.reload(mock_requests)
        management.call_command("loaddata", "additional-languages.json",
                                verbosity=1, noinput=True)
        management.call_command("loaddata", "uw_region_seed.json",
                                verbosity=1, noinput=True)
        update_countries_from_imports()  # run task synchronously here
        integrate_imports()  # run task synchronously here
def setUpClass(cls):
    """Load fixture files, patch the HTTP layer, and run the whole
    import pipeline once for this test class.

    Fixture files are read through context managers so every handle is
    closed promptly (the original leaked five open file objects).
    """
    super(LanguageIntegrationTests, cls).setUpClass()
    data_dir = os.path.join(os.path.dirname(__file__),
                            "../imports/tests/data")

    def _read_fixture(name):
        # Read one fixture file and close its handle immediately.
        with open(os.path.join(data_dir, name)) as f:
            return f.read()

    wikipedia = _read_fixture("wikipedia.html")
    ethno = _read_fixture("LanguageCodes.tab")
    country = _read_fixture("CountryCodes.tab")
    sil = _read_fixture("iso_639_3.tab")
    w_country = _read_fixture("wikipedia_country.html")

    with patch("requests.Session") as mock_requests:
        # Feed each canned payload to the matching import model's
        # reload() through the mocked HTTP session.
        mock_requests.get().status_code = 200
        mock_requests.get().content = wikipedia
        WikipediaISOLanguage.reload(mock_requests)
        mock_requests.get().content = ethno
        EthnologueLanguageCode.reload(mock_requests)
        mock_requests.get().content = country
        EthnologueCountryCode.reload(mock_requests)
        mock_requests.get().content = sil
        SIL_ISO_639_3.reload(mock_requests)
        mock_requests.get().content = w_country
        WikipediaISOCountry.reload(mock_requests)
        management.call_command("loaddata", "additional-languages.json",
                                verbosity=1, noinput=True)
        management.call_command("loaddata", "uw_region_seed.json",
                                verbosity=1, noinput=True)
        update_countries_from_imports()  # run task synchronously here
        integrate_imports()  # run task synchronously here