def test_currency_when_create_entry(self):
    """Entry currency codes are upper-cased, whether they come from the
    loader's default or are passed explicitly."""
    loader = self._make_loader(currency=u'Default')
    # Default currency inherited from the loader is normalised.
    default_entry = self._make_entry(loader)
    h.assert_equal(default_entry['currency'], u'DEFAULT')
    # An explicitly supplied currency is normalised the same way.
    other_entry = self._make_entry(loader, name='Other Entry',
                                   currency=u'other')
    h.assert_equal(other_entry['currency'], u'OTHER')
def test_urlopen_lines(self, urlopen_mock):
    """urlopen_lines() yields the response line by line, preserving the
    newline terminators (the last line has none)."""
    urlopen_mock.return_value = DATA_FP
    # list() replaces the pass-through comprehension
    # `[line for line in ...]` — same result, clearer intent.
    lines = list(util.urlopen_lines("http://none"))
    h.assert_equal(lines, ["line one\n", "line two\n", "line three"])
def test_loader_creates_changeobject_for_entities(self):
    """Creating an entity records a change object belonging to the
    loader's changeset and carrying the entity's name."""
    loader = self._make_loader()
    created = loader.create_entity(u'Test Entity')
    change = self._find_changeobject('entity', created.id)
    h.assert_equal(change['changeset']['_id'], loader.changeset.id)
    h.assert_equal(change['data']['name'], created['name'])
def test_job_log_stderr(self):
    """Output a job writes to stderr ends up in its log file."""
    daemon.dispatch_job('test', config['__file__'], 'test_stderr')
    # Poll until the dispatched job has finished before reading its log.
    while daemon.job_running('test'):
        time.sleep(0.1)
    h.assert_equal(daemon.job_log('test'), 'Text to standard error\n')
def test_erroneous_values(self):
    """A badly formatted date cell is reported as exactly one error,
    mentioning "date" and pointing at the offending line."""
    importer = CSVImporter(csv_fixture("erroneous_values"),
                           csv_fixture_model())
    importer.run(dry_run=True)
    errors = importer.errors
    h.assert_equal(len(errors), 1)
    h.assert_true("date" in errors[0].message,
                  "Should find badly formatted date")
    h.assert_equal(errors[0].line_number, 5)
def test_import_errors(self):
    """A fixture with broken rows produces multiple errors; the first
    one points at line 1 (missing date column)."""
    data = csv_fixture("import_errors")
    model = csv_fixture_model()
    importer = CSVImporter(data, model)
    importer.run(dry_run=True)
    h.assert_true(len(importer.errors) > 1, "Should have errors")
    # Fixed typo in the assertion message ("colum" -> "column").
    h.assert_equal(importer.errors[0].line_number, 1,
                   "Should detect missing date column in line 1")
def test_blank_errors(self):
    """An empty mapping document fails validation for every required
    top-level key."""
    expected = {
        'from': 'Required',
        'to': 'Required',
        'time': 'Required',
        'amount': 'Required'
    }
    h.assert_equal(validation_errors(Mapping, '{}'), expected)
def test_loader_creates_changeobject_for_classifiers(self):
    """Creating a classifier records a change object belonging to the
    loader's changeset and carrying the classifier's name."""
    loader = self._make_loader()
    created = loader.create_classifier(u'testclassifier', u'testtaxonomy')
    change = self._find_changeobject('classifier', created.id)
    h.assert_equal(change['changeset']['_id'], loader.changeset.id)
    h.assert_equal(change['data']['name'], created['name'])
def test_args(self):
    """Positional arguments handed to dispatch_job reach the job and
    are echoed into its log."""
    args = ('one', '123', 'abc')
    daemon.dispatch_job('test', config['__file__'], 'test_args', args)
    # Poll until the dispatched job has finished before reading its log.
    while daemon.job_running('test'):
        time.sleep(0.1)
    h.assert_equal(daemon.job_log('test'), "('one', '123', 'abc')\n")
def test_classifiers_cached_with_passed_in_cache(self):
    """When a cache dict is supplied, the classifier is stored in it
    rather than in the loader's own classifier cache."""
    loader = self._make_loader()
    cache = {}
    classifier = loader.create_classifier(name=u'Test Classifier',
                                          taxonomy=u'taxonomy',
                                          _cache=cache)
    # list(...) makes the values() indexing portable: on Python 3
    # dict.values() returns a non-subscriptable view.
    h.assert_true(classifier is list(cache.values())[0])
    h.assert_equal(len(loader.classifier_cache), 0)
def test_empty(self):
    """An empty dataset document fails validation for every required
    field."""
    expected = {
        'name': 'Required',
        'label': 'Required',
        'description': 'Required',
        'currency': 'Required'
    }
    h.assert_equal(validation_errors(Dataset, '{}'), expected)
def test_error_with_empty_additional_date_column(self):
    """An empty cell in an additional date column surfaces as exactly
    one import error."""
    fixture_name = "empty_additional_date_column"
    importer = CSVImporter(csv_fixture(fixture_name),
                           csv_fixture_model(name=fixture_name))
    importer.run()
    # We are currently not able to import date cells without a value. See:
    # http://trac.openspending.org/ticket/170
    h.assert_equal(len(importer.errors), 1)
def test_nested_classifier_columns(self):
    """A nested mapping expands the "to" entity into two fields with
    the expected column/name pairs."""
    raw = csv_fixture("nested-mapping").read()
    mapping = MappingImporter().import_from_string(raw)
    to_fields = mapping["to"]["fields"]
    h.assert_equal(len(to_fields), 2)
    expected = [(u"paid_to", "label"),
                (u"paid_to_identifier", "identifier")]
    for field, (column, name) in zip(to_fields, expected):
        h.assert_equal(field["column"], column)
        h.assert_equal(field["name"], name)
def test_line_in_error(self):
    """Errors raised for a bad mapping carry the line number of the
    offending row (line 2: header is line 1)."""
    importer = MappingImporter()
    data = csv_fixture("wrong-objecttype-mapping").read()
    try:
        importer.import_from_string(data)
    except ValueError as E:  # `as` form works on Python 2.6+ and 3
        errors = E.args[0]
        h.assert_equal(len(errors), 1)
        h.assert_equal(errors[0]["line"], 2)
        return
    # Previously the test passed silently when no exception was raised;
    # make that case an explicit failure.
    raise AssertionError("import_from_string did not raise ValueError")
def test_create_finds_existing_entity_in_db(self):
    """create_entity() with match_keys reuses an entity already stored
    in the database instead of inserting a duplicate."""
    Entity.c.save({'name': 'existing', 'company_id': 1000})
    existing = Entity.find_one({'company_id': 1000})
    loader = self._make_loader()
    loader.create_entity(name=u'Test', company_id=1000,
                         match_keys=('company_id',))
    from_cache = loader.entity_cache[('company_id',)][(1000,)]
    h.assert_equal(existing['_id'], from_cache['_id'])
    # Still exactly one entity with this company_id in the database.
    h.assert_equal(Entity.find({'company_id': 1000}).count(), 1)
def test_create_entry_returns_query_spec(self):
    """create_entry() returns a query spec (with an ObjectId _id) under
    which the freshly stored entry can be fetched again."""
    from bson import ObjectId
    loader = self._make_loader()
    attributes = {
        'name': 'one',
        'amount': 1000.00,
        'from': loader.create_entity(u'From Entity'),
        'to': loader.create_entity(u'To Entity'),
        'first': u'first',
        'second': u'second',
        'extra': u'extra',
    }
    query_spec = loader.create_entry(**attributes)
    h.assert_true(isinstance(query_spec['_id'], ObjectId))
    fetched = Entry.find_one(query_spec)
    h.assert_equal(fetched['name'], 'one')
def test_loader_creates_indexes(self):
    """Constructing a loader adds the expected indexes to the entry
    and entity collections."""
    db = mongo.db()
    db.create_collection('entry')
    db.create_collection('entity')
    # Fresh collections carry only the implicit _id index.
    h.assert_equal(self._get_index_num(Entry), 1)
    h.assert_equal(self._get_index_num(Entity), 1)
    self._make_loader()
    # The loader adds 8 entry indexes and 1 entity index on top of _id.
    h.assert_equal(self._get_index_num(Entry), 9)
    h.assert_equal(self._get_index_num(Entity), 2)
def test_classify_entry(self):
    """classify_entry() attaches the classifier id and a named
    sub-document (label/name/taxonomy/ref) to the entry."""
    loader = self._make_loader()
    entry = {'name': u'Test Entry', 'amount': 1000.00}
    c_name = u'support-transparency'
    c_taxonomy = u'Good Reasons'
    c_label = u'Support Transparency Initiatives'
    classifier = loader.create_classifier(name=c_name, label=c_label,
                                          taxonomy=c_taxonomy)
    loader.classify_entry(entry, classifier, name=u'reason')
    # Compare key *sets*: asserting on an ordered keys() list made the
    # test depend on dict hash ordering, which is not guaranteed.
    h.assert_equal(set(entry.keys()),
                   set([u'reason', 'amount', 'name', 'classifiers']))
    h.assert_equal(entry['classifiers'], [classifier['_id']])
    h.assert_equal(entry['reason']['label'], c_label)
    h.assert_equal(entry['reason']['name'], c_name)
    h.assert_equal(entry['reason']['taxonomy'], c_taxonomy)
    h.assert_true(isinstance(entry['reason']['ref'], DBRef))
def test_create_entry_creates_entities(self):
    """entitify_entry() links an entity into the entry; creating the
    entry then carries all referenced entity ids."""
    loader = self._make_loader()
    special_entity = loader.create_entity(name='special')
    testentry = {'name': 'testentry'}
    loader.entitify_entry(testentry, special_entity, 'special')
    h.assert_true('special' in testentry)
    entity_ids = testentry['entities']
    h.assert_equal(len(entity_ids), 1)
    h.assert_equal(special_entity['_id'], entity_ids[0])
    created = self._make_entry(loader, **testentry)
    # _make_entry contributes two more entities (from/to) on top.
    h.assert_equal(len(created['entities']), 3)
def test_empty_csv(self):
    """Importing a zero-byte CSV yields two errors, both on line 0,
    the second complaining that no data lines were read."""
    importer = CSVImporter(StringIO(""), csv_fixture_model())
    importer.run(dry_run=True)
    errors = importer.errors
    h.assert_equal(len(errors), 2)
    for error in errors:
        h.assert_equal(error.line_number, 0)
    h.assert_true("Didn't read any lines of data" in str(errors[1].message))
def test_successful_import(self):
    """A clean fixture imports fully: the dataset is created, four
    entries are stored, and amounts are parsed as floats."""
    importer = CSVImporter(csv_fixture("successful_import"),
                           csv_fixture_model())
    importer.run()
    dataset = Dataset.find_one()
    h.assert_true(dataset is not None, "Dataset should not be None")
    h.assert_equal(dataset.name, "test-csv")
    imported = list(Entry.find({"dataset.name": dataset.name}))
    h.assert_equal(len(imported), 4)
    second_line = Entry.find_one({"provenance.line": 2})
    h.assert_true(second_line is not None,
                  "Entry with name could not be found")
    h.assert_equal(second_line.amount, 130000.0)
def test_wrong_objecttype(self):
    """A bad ObjectType value is rejected with a message listing the
    allowed values."""
    importer = MappingImporter()
    data = csv_fixture("wrong-objecttype-mapping").read()
    try:
        importer.import_from_string(data)
    except ValueError as E:  # `as` form works on Python 2.6+ and 3
        errors = E.args[0]
        h.assert_equal(len(errors), 1)
        h.assert_equal(errors[0]["line"], 2)
        h.assert_equal(
            errors[0]["message"],
            (u'Value in column "ObjectType" is "entit". '
             u'Allowed values: "classifier", "entity", '
             u'"value"'),
        )
        return
    # Previously the test passed silently when no exception was raised;
    # make that case an explicit failure.
    raise AssertionError("import_from_string did not raise ValueError")
def test_blank_errors(self):
    """An empty field document is missing both required attributes."""
    expected = {
        'name': 'Required',
        'datatype': 'Required'
    }
    h.assert_equal(validation_errors(Field, '{}'), expected)
def test_blank_errors(self):
    """An empty date dimension document is missing its required
    column attribute."""
    expected = {'column': 'Required'}
    h.assert_equal(validation_errors(DateDimension, '{}'), expected)
def test_blank(self):
    """An empty field list validates to an empty result."""
    outcome = validation_result(Fields, '[]')
    h.assert_equal(outcome, [])
def test_empty_mapping(self):
    """A simple mapping imports into exactly five top-level keys, each
    carrying the expected column/type metadata."""
    raw = csv_fixture("simple-mapping").read()
    mapping = MappingImporter().import_from_string(raw)
    h.assert_equal(sorted(mapping.keys()),
                   [u"amount", u"currency", u"from", u"time", u"to"])

    # The two entity mappings differ only in their source column.
    def entity_mapping(column):
        return {
            "description": u"z",
            "fields": [
                {
                    "column": column,
                    "constant": "",
                    "datatype": u"string",
                    "default_value": u"x",
                    "name": "label",
                }
            ],
            "label": u"y",
            "type": u"entity",
        }

    # The three value mappings share everything but column, datatype
    # and default.
    def value_mapping(column, datatype, default):
        return {
            "column": column,
            "datatype": datatype,
            "default_value": default,
            "description": u"z",
            "label": u"y",
            "type": u"value",
        }

    expectations = [
        ("amount", value_mapping(u"amount", u"float", u"x")),
        ("currency", value_mapping(u"currency", u"string", u"GBP")),
        ("from", entity_mapping(u"paid_by")),
        ("to", entity_mapping(u"paid_to")),
        ("time", value_mapping(u"date", u"date", u"x")),
    ]
    for key, expected in expectations:
        h.assert_equal(mapping[key], expected)
def test_successful_import_with_simple_testdata(self):
    """The "simple" fixture imports without errors; spot-check the
    first of the five resulting entries."""
    importer = CSVImporter(csv_fixture("simple"),
                           csv_fixture_model(name="simple"))
    importer.run()
    h.assert_equal(importer.errors, [])
    dataset = Dataset.find_one()
    h.assert_true(dataset is not None, "Dataset should not be None")
    entries = list(Entry.find({"dataset.name": dataset.name}))
    h.assert_equal(len(entries), 5)
    first = entries[0]
    h.assert_equal(first["from"]["label"], "Test From")
    h.assert_equal(first["to"]["label"], "Test To")
    h.assert_equal(first["time"]["unparsed"], "2010-01-01")
    h.assert_equal(first["amount"], 100.00)
def test_logfile_path(self):
    """Job log files live under <sys.prefix>/var/log with the
    openspendingetld_ prefix."""
    expected = sys.prefix + '/var/log/openspendingetld_test.log'
    h.assert_equal(daemon.logfile_path('test'), expected)
def test_pidfile_path(self):
    """Job pid files live under <sys.prefix>/var/run with the
    openspendingetld_ prefix."""
    expected = sys.prefix + '/var/run/openspendingetld_test.pid'
    h.assert_equal(daemon.pidfile_path('test'), expected)
def check_throws_one_error(self, importer):
    """Shared helper: assert the importer recorded exactly one error."""
    error_count = len(importer.errors)
    h.assert_equal(error_count, 1)