def test_create_entry_does_not_delete_attributes_in_existing(self):
    """Upserting an existing entry must keep attributes it already has."""
    first_loader = self._make_loader()
    original = self._make_entry(first_loader, extra=u'extra')
    h.assert_true('extra' in original)
    # Re-run the load with a fresh loader; 'extra' must survive the upsert.
    second_loader = self._make_loader()
    upserted = self._make_entry(second_loader, extra=u'extra')
    h.assert_true('extra' in upserted)
def test_create_entity_does_not_delete_attributes_in_existing(self):
    """An upsert that omits 'extra' must not drop the stored 'extra'."""
    loader = self._make_loader()
    created = loader.create_entity(u'Test Entity', extra=u'extra')
    h.assert_true('extra' in created)
    # Second load upserts the same entity without 'extra'.
    other_loader = self._make_loader()
    upserted = other_loader.create_entity(u'Test Entity')
    h.assert_true('extra' in upserted)
def test_classifiers_cached_with_passed_in_cache(self):
    """A caller-supplied cache is used instead of the loader's own."""
    loader = self._make_loader()
    external_cache = {}
    created = loader.create_classifier(name=u'Test Classifier',
                                       taxonomy=u'taxonomy',
                                       _cache=external_cache)
    # The classifier lands in the passed-in cache ...
    h.assert_true(created is external_cache.values()[0])
    # ... and the loader's internal cache stays untouched.
    h.assert_equal(len(loader.classifier_cache), 0)
def test_erroneous_values(self):
    """A single badly formatted date is reported with its line number."""
    importer = CSVImporter(csv_fixture("erroneous_values"),
                           csv_fixture_model())
    importer.run(dry_run=True)
    h.assert_equal(len(importer.errors), 1)
    error = importer.errors[0]
    h.assert_true("date" in error.message,
                  "Should find badly formatted date")
    h.assert_equal(error.line_number, 5)
def test_import_errors(self):
    """A fixture full of broken rows must surface importer errors.

    The first error is expected to flag the missing date column on line 1.
    """
    data = csv_fixture("import_errors")
    model = csv_fixture_model()
    importer = CSVImporter(data, model)
    importer.run(dry_run=True)
    h.assert_true(len(importer.errors) > 1, "Should have errors")
    # Fixed typo in the failure message: "colum" -> "column".
    h.assert_equal(importer.errors[0].line_number, 1,
                   "Should detect missing date column in line 1")
def test_create_entry_creates_entities(self):
    """entitify_entry attaches an entity; create_entry adds the rest."""
    loader = self._make_loader()
    entity = loader.create_entity(name='special')
    entry = {'name': 'testentry'}
    loader.entitify_entry(entry, entity, 'special')
    h.assert_true('special' in entry)
    h.assert_equal(len(entry['entities']), 1)
    h.assert_equal(entity['_id'], entry['entities'][0])
    # Creating the full entry brings the entity count up to three.
    stored = self._make_entry(loader, **entry)
    h.assert_equal(len(stored['entities']), 3)
def test_create_classifier_does_not_delete_attributes_in_existing(self):
    """Upserting a classifier without 'extra' must keep the stored 'extra'."""
    loader = self._make_loader()
    created = loader.create_classifier(u'Test Classifier',
                                       taxonomy=u'taxonomy',
                                       extra=u'extra')
    h.assert_true('extra' in created)
    # Upsert the same classifier from a fresh loader, omitting 'extra'.
    other_loader = self._make_loader()
    upserted = other_loader.create_classifier(u'Test Classifier',
                                              taxonomy=u'taxonomy')
    h.assert_true('extra' in upserted)
def validation_errors(cls, fp_or_str):
    """Deserialize *fp_or_str* with schema *cls* and return its errors.

    Fails the calling test if deserialization unexpectedly succeeds.
    """
    parsed = _load_json(fp_or_str)
    try:
        cls().deserialize(parsed)
    except Invalid as err:
        return err.asdict()
    h.assert_true(
        False,
        "Expected validation to throw errors, but none thrown."
    )
def test_empty_csv(self):
    """An empty input stream yields two line-0 errors, one explaining why."""
    importer = CSVImporter(StringIO(""), csv_fixture_model())
    importer.run(dry_run=True)
    h.assert_equal(len(importer.errors), 2)
    for error in importer.errors:
        h.assert_equal(error.line_number, 0)
    h.assert_true("Didn't read any lines of data"
                  in str(importer.errors[1].message))
def test_successful_import(self):
    """A clean fixture imports a dataset with four entries."""
    importer = CSVImporter(csv_fixture("successful_import"),
                           csv_fixture_model())
    importer.run()
    dataset = Dataset.find_one()
    h.assert_true(dataset is not None, "Dataset should not be None")
    h.assert_equal(dataset.name, "test-csv")
    entries = list(Entry.find({"dataset.name": dataset.name}))
    h.assert_equal(len(entries), 4)
    # Spot-check a single entry via its provenance line.
    entry = Entry.find_one({"provenance.line": 2})
    h.assert_true(entry is not None, "Entry with name could not be found")
    h.assert_equal(entry.amount, 130000.0)
def test_create_entry_returns_query_spec(self):
    """create_entry hands back a spec that re-fetches the stored entry."""
    from bson import ObjectId
    loader = self._make_loader()
    attributes = {
        'name': 'one',
        'amount': 1000.00,
        'from': loader.create_entity(u'From Entity'),
        'to': loader.create_entity(u'To Entity'),
        'first': u'first',
        'second': u'second',
        'extra': u'extra',
    }
    spec = loader.create_entry(**attributes)
    h.assert_true(isinstance(spec['_id'], ObjectId))
    # The spec must work as a Mongo query for the freshly created entry.
    stored = Entry.find_one(spec)
    h.assert_equal(stored['name'], 'one')
def test_classify_entry(self):
    """classify_entry attaches a classifier to an entry under a given name."""
    loader = self._make_loader()
    entry = {'name': u'Test Entry', 'amount': 1000.00}
    c_name = u'support-transparency'
    c_taxonomy = u'Good Reasons'
    c_label = u'Support Transparency Initiatives'
    classifier = loader.create_classifier(name=c_name, label=c_label,
                                          taxonomy=c_taxonomy)
    loader.classify_entry(entry, classifier, name=u'reason')
    # Compare keys as a set: dict key order is an implementation detail,
    # so the previous ordered-list comparison only passed by accident.
    h.assert_equal(set(entry.keys()),
                   set([u'reason', 'amount', 'name', 'classifiers']))
    h.assert_equal(entry['classifiers'], [classifier['_id']])
    h.assert_equal(entry['reason']['label'], c_label)
    h.assert_equal(entry['reason']['name'], c_name)
    h.assert_equal(entry['reason']['taxonomy'], c_taxonomy)
    h.assert_true(isinstance(entry['reason']['ref'], DBRef))
def test_successful_import_with_simple_testdata(self):
    """The 'simple' fixture imports without errors and keeps its values."""
    importer = CSVImporter(csv_fixture("simple"),
                           csv_fixture_model(name="simple"))
    importer.run()
    h.assert_equal(importer.errors, [])
    dataset = Dataset.find_one()
    h.assert_true(dataset is not None, "Dataset should not be None")
    entries = list(Entry.find({"dataset.name": dataset.name}))
    h.assert_equal(len(entries), 5)
    # Verify the field values of the first imported entry.
    first = entries[0]
    h.assert_equal(first["from"]["label"], "Test From")
    h.assert_equal(first["to"]["label"], "Test To")
    h.assert_equal(first["time"]["unparsed"], "2010-01-01")
    h.assert_equal(first["amount"], 100.00)
def test_missing_columns(self):
    """Importing metadata without the required columns must raise.

    Uses ``except ... as`` (valid on Python 2.6+ and Python 3) instead of
    the Python-2-only ``except AssertionError, E`` form, and fails
    explicitly when no exception is raised — previously the test silently
    passed in that case.
    """
    data = ("Uninteresting,Columns\n"
            "Uninteresting,Values")
    importer = MappingImporter()
    try:
        importer.import_from_string(data)
    except AssertionError as E:
        h.assert_true(
            'The Metadata document must have the columns "Original Field",'
            in str(E))
        h.assert_true("The column(s)" in str(E))
        h.assert_true("are missing." in str(E))
        return
    h.assert_true(False, "Expected AssertionError, but none was raised.")
def test_create_entity(self):
    """create_entity returns an Entity instance."""
    created = self._make_loader().create_entity(name=u'Test Entity')
    h.assert_true(isinstance(created, Entity))
def test_create_entry_with_different_match_keys(self):
    """Custom match_keys are honoured for caching and persistence."""
    loader = self._make_loader()
    loader.create_entity(name=u'Test', company_id=1000,
                         match_keys=('company_id',))
    # Cached under the custom key tuple ...
    h.assert_equal(len(loader.entity_cache[('company_id',)]), 1)
    # ... and stored so it can be found by company_id.
    h.assert_true(Entity.find_one({'company_id': 1000}) is not None)
def test_entities_are_cached(self):
    """A freshly created entity is held in the loader's own cache."""
    loader = self._make_loader()
    created = loader.create_entity(name=u'Test Entity')
    cached = loader.entity_cache[('name',)].values()[0]
    h.assert_true(created is cached)
def test_create_entry(self):
    """_make_entry produces an Entry instance."""
    created = self._make_entry(self._make_loader())
    h.assert_true(isinstance(created, Entry))
def test_currency_invalid(self):
    """An unknown currency code is rejected by validation."""
    errs = validation_errors(Dataset, '{"currency": "bad-currency-code"}')
    message = errs.get('currency')
    h.assert_true(
        "currency" in message,
        "'currency' not in validation errors!"
    )
def test_create_loader(self):
    """A new loader wraps a Dataset and starts with zero entries."""
    loader = self._make_loader()
    h.assert_true(isinstance(loader.dataset, Dataset))
    h.assert_equal(loader.dataset.name, u'test_dataset')
    h.assert_equal(loader.num_entries, 0)
def test_entities_cached_with_passed_in_cached(self):
    """A caller-supplied cache receives the entity; the loader's stays empty."""
    loader = self._make_loader()
    external_cache = {('name',): {}}
    created = loader.create_entity(name=u'Test Entity',
                                   _cache=external_cache)
    # The entity goes into the external cache, not the loader's own.
    h.assert_true(created is external_cache[('name',)].values()[0])
    h.assert_equal(len(loader.entity_cache), 0)
def test_create_classifier(self):
    """create_classifier returns a Classifier instance."""
    created = self._make_loader().create_classifier(
        name=u'Test Classifier', taxonomy=u'taxonomy')
    h.assert_true(isinstance(created, Classifier))
def test_default_society(self):
    """The default society is exposed as an Entity."""
    society = self._make_loader().get_default_society()
    h.assert_true(isinstance(society, Entity))
def test_name_badcharacters(self):
    """A dataset name with spaces/capitals fails name validation."""
    errs = validation_errors(Dataset, '{"name": "Not Valid"}')
    h.assert_true(
        "Dataset name must include only" in errs.get('name'),
        "'Dataset name must include only' not in validation errors!"
    )