def test_line_in_error(self):
    """The ValueError raised for a bad ObjectType must report the CSV line
    number of the offending row."""
    importer = MappingImporter()
    data = csv_fixture("wrong-objecttype-mapping").read()
    try:
        importer.import_from_string(data)
    except ValueError as E:  # 'as' form works on Python 2.6+ and 3.x
        errors = E.args[0]
        h.assert_equal(len(errors), 1)
        # The bad value sits on the second line of the fixture CSV.
        h.assert_equal(errors[0]["line"], 2)
        return
    # Fix: previously the test passed silently when no ValueError was raised.
    raise AssertionError("import_from_string did not raise ValueError")
def test_nested_classifier_columns(self):
    """A nested mapping CSV produces two 'to' fields: label and identifier."""
    raw_csv = csv_fixture("nested-mapping").read()
    mapping = MappingImporter().import_from_string(raw_csv)
    fields = mapping["to"]["fields"]
    h.assert_equal(len(fields), 2)
    # Each expected entry is a (source column, field name) pair, in order.
    expected = [
        (u"paid_to", "label"),
        (u"paid_to_identifier", "identifier"),
    ]
    for field, (column, name) in zip(fields, expected):
        h.assert_equal(field["column"], column)
        h.assert_equal(field["name"], name)
def test_missing_columns(self):
    """Importing a CSV without the required metadata columns must raise an
    AssertionError naming the missing columns."""
    data = "Uninteresting,Columns\n" "Uninteresting,Values"
    importer = MappingImporter()
    try:
        importer.import_from_string(data)
    except AssertionError as E:  # 'as' form works on Python 2.6+ and 3.x
        message = str(E)
        h.assert_true('The Metadata document must have the columns "Original Field",' in message)
        h.assert_true("The column(s)" in message)
        h.assert_true("are missing." in message)
        return
    # Fix: previously the test passed silently when nothing was raised.
    # (Raised outside the try block, so it is not caught above.)
    raise AssertionError("import_from_string did not raise AssertionError")
def _test_mapping(self, dir):
    """Import csv_import/<dir>/mapping.csv and compare the result against the
    golden JSON in csv_import/<dir>/mapping.json."""
    csv_file = h.fixture_file("csv_import/%s/mapping.csv" % dir)
    json_file = h.fixture_file("csv_import/%s/mapping.json" % dir)
    expected = json.load(json_file)
    observed = MappingImporter().import_from_string(csv_file.read())
    assert observed == expected
def command(self):
    """Fetch the mapping CSV at the URL given as the first argument, convert
    it, and print the resulting mapping as indented JSON on stdout."""
    super(MappingConvertCommand, self).command()
    self._check_args_length(1)
    # Imports kept local, matching the original's lazy-import style.
    from openspending.lib import json
    from openspending.etl.mappingimporter import MappingImporter
    url = self.args[0]
    converted = MappingImporter().import_from_url(url)
    # Single-argument print with parentheses prints identically on Python 2.
    print(json.dumps(converted, indent=2))
def test_wrong_objecttype(self):
    """A bad ObjectType value must raise ValueError with one error entry
    carrying the line number and the allowed-values message."""
    importer = MappingImporter()
    data = csv_fixture("wrong-objecttype-mapping").read()
    try:
        importer.import_from_string(data)
    except ValueError as E:  # 'as' form works on Python 2.6+ and 3.x
        errors = E.args[0]
        h.assert_equal(len(errors), 1)
        h.assert_equal(errors[0]["line"], 2)
        h.assert_equal(
            errors[0]["message"],
            (u'Value in column "ObjectType" is "entit". '
             u'Allowed values: "classifier", "entity", '
             u'"value"'),
        )
        return
    # Fix: previously the test passed silently when no ValueError was raised.
    raise AssertionError("import_from_string did not raise ValueError")
def test_empty_mapping(self):
    """The simple mapping fixture imports into exactly five top-level keys,
    each mapping to the full expected dictionary."""
    mapping = MappingImporter().import_from_string(
        csv_fixture("simple-mapping").read())
    h.assert_equal(sorted(mapping.keys()),
                   [u"amount", u"currency", u"from", u"time", u"to"])

    def entity(column):
        # 'from' and 'to' differ only in the source column of their one field.
        return {
            "description": u"z",
            "fields": [
                {
                    "column": column,
                    "constant": "",
                    "datatype": u"string",
                    "default_value": u"x",
                    "name": "label",
                }
            ],
            "label": u"y",
            "type": u"entity",
        }

    expected = {
        "amount": {
            "column": u"amount",
            "datatype": u"float",
            "default_value": u"x",
            "description": u"z",
            "label": u"y",
            "type": u"value",
        },
        "currency": {
            "column": u"currency",
            "datatype": u"string",
            "default_value": u"GBP",
            "description": u"z",
            "label": u"y",
            "type": u"value",
        },
        "from": entity(u"paid_by"),
        "to": entity(u"paid_to"),
        "time": {
            "column": u"date",
            "datatype": u"date",
            "default_value": u"x",
            "description": u"z",
            "label": u"y",
            "type": u"value",
        },
    }
    # Check keys in the same order the original test asserted them.
    for key in ("amount", "currency", "from", "to", "time"):
        h.assert_equal(mapping[key], expected[key])