# NOTE(review): this chunk opens mid-statement -- the ``def __init__(...)``
# header and the opening of the ``Monument.__init__(`` call are outside this
# view; only the tail of the constructor is visible here.
                          repository)
        # Build the Wikidata statements for one Estonian (ee/et) monument row.
        # NOTE(review): the setter sequence looks order-sensitive (e.g.
        # update_heritage() runs after set_heritage()) -- confirm before
        # reordering any of these calls.
        self.set_changed()
        self.set_monuments_all_id("number")
        self.set_wlm_source()
        self.set_heritage_id()
        self.set_heritage()
        self.set_coords()
        self.set_commonscat()
        self.set_image("pilt")  # "pilt" is the image column in this table
        self.set_country()
        self.set_adm_location()
        self.update_heritage()
        self.set_is()
        self.set_location()
        self.set_labels("et", self.nimi)
        self.set_descriptions()
        # Attach an existing Wikidata item when one matches, rather than
        # creating a duplicate.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Command line entry point for importer."""
    args = importer.handle_args()
    dataset = Dataset("ee", "et", EeEt)
    dataset.data_files = {
        "municipalities": "estonia_municipalities.json",
        "settlements": "estonia_settlements.json",
        "counties": "estonia_counties.json"
    }
    dataset.lookup_downloads = {"heritage_types": "ee (et)/types"}
    importer.main(args, dataset)
def make_dataset(country, language):
    """
    Construct a dataset instance for the provided country and language codes.

    Only kept for backwards-compatibility with older monument classes.

    :param country: country/dataset code, e.g. "se-ship"
    :param language: language code, e.g. "sv"
    :return: a Dataset wired with the table-specific monument class plus its
        optional data_files / lookup_downloads / subclass_downloads
    :raises SystemExit: (exit status 1) when no monument class is mapped for
        the country/language pair
    """
    import sys

    from Monument import Dataset
    from CzCs import CzCs
    from AtDe import AtDe
    from DkBygningDa import DkBygningDa
    from DkFortidsDa import DkFortidsDa
    from EeEt import EeEt
    from HuHu import HuHu
    from IeEn import IeEn
    from NoNo import NoNo
    from PlPl import PlPl
    from PtPt import PtPt
    from SeArbetslSv import SeArbetslSv
    from SeBbrSv import SeBbrSv
    from SeShipSv import SeShipSv
    from ZaEn import ZaEn

    # Per-table wiring: monument class plus any auxiliary downloads.
    SPECIFIC_TABLES = {
        "monuments_se-ship_(sv)": {
            "class": SeShipSv,
            "data_files": {
                "functions": "se-ship_(sv)_functions.json"
            }
        },
        "monuments_cz_(cs)": {"class": CzCs, "data_files": {}},
        "monuments_hu_(hu)": {"class": HuHu, "data_files": {}},
        "monuments_pt_(pt)": {"class": PtPt, "data_files": {}},
        "monuments_ie_(en)": {
            "class": IeEn,
            "data_files": {
                "counties": "ireland_counties.json"
            }
        },
        "monuments_za_(en)": {"class": ZaEn, "data_files": {}},
        "monuments_at_(de)": {
            "class": AtDe,
            "data_files": {
                "municipalities": "austria_municipalities.json"
            },
            "lookup_downloads": {
                "types": "at_(de)/types"
            }
        },
        "monuments_dk-bygninger_(da)": {
            "class": DkBygningDa,
            "data_files": {},
            "subclass_downloads": {
                "settlement": "Q486972"
            }
        },
        "monuments_pl_(pl)": {
            "class": PlPl,
            "data_files": {
                "settlements": "poland_settlements.json"
            }
        },
        "monuments_dk-fortidsminder_(da)": {
            "class": DkFortidsDa,
            "data_files": {
                "types": "dk-fortidsminder_(da)_types.json",
                "municipalities": "denmark_municipalities.json"
            }
        },
        "monuments_no_(no)": {"class": NoNo, "data_files": {}},
        "monuments_se-bbr_(sv)": {
            "class": SeBbrSv,
            "data_files": {
                "functions": "se-bbr_(sv)_functions.json",
                "settlements": "sweden_settlements.json"
            }
        },
        "monuments_ee_(et)": {
            "class": EeEt,
            "data_files": {
                "counties": "estonia_counties.json"
            }
        },
        "monuments_se-arbetsl_(sv)": {
            "class": SeArbetslSv,
            "data_files": {
                "municipalities": "sweden_municipalities.json",
                "types": "se-arbetsl_(sv)_types.json",
                "settlements": "sweden_settlements.json"
            }
        }
    }

    specific_table_name = utils.get_specific_table_name(country, language)
    specific_table = SPECIFIC_TABLES.get(specific_table_name)
    if specific_table is None:
        print("No class defined for {0}.".format(specific_table_name))
        # Fail with a non-zero status.  The previous code called the bare
        # ``exit()`` builtin, which (a) is only injected by the site module
        # and so is not guaranteed to exist, and (b) exited with status 0,
        # signalling success on an error path.
        sys.exit(1)

    dataset = Dataset(country, language, specific_table["class"])
    # Optional keys stay None when absent, matching the original dict.get()
    # behaviour relied upon by callers.
    dataset.data_files = specific_table.get("data_files")
    dataset.lookup_downloads = specific_table.get("lookup_downloads")
    dataset.subclass_downloads = specific_table.get("subclass_downloads")
    return dataset
# NOTE(review): chunk starts mid-method -- the ``def __init__(self,
# db_row_dict, mapping, data_files, existing, repository):`` header is
# outside this view.
        Monument.__init__(self, db_row_dict, mapping, data_files, existing,
                          repository)
        # Build the Wikidata statements for one Ghana (gh/en) monument row.
        # NOTE(review): setter order is assumed significant -- verify before
        # reordering.
        self.set_monuments_all_id("id")
        self.set_changed()
        self.set_wlm_source()
        self.set_country()
        self.set_adm_location()
        self.set_location()
        self.set_is()
        self.set_special_is()
        self.set_image()
        self.set_inception()
        self.set_commonscat()
        self.set_coords()
        self.set_heritage()
        self.set_heritage_id()
        self.update_labels()
        self.update_descriptions()
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Command line entry point for importer."""
    args = importer.handle_args()
    dataset = Dataset("gh", "en", GhEn)
    dataset.data_files = {
        "regions": "ghana_regions.json"  # http://tinyurl.com/y9ye4kfg
    }
    dataset.lookup_downloads = {"is": "gh_(en)/original function"}
    importer.main(args, dataset)
# NOTE(review): chunk starts mid-method -- the ``def __init__(...)`` header
# is outside this view.
        Monument.__init__(self, db_row_dict, mapping, data_files, existing,
                          repository)
        # Build the Wikidata statements for one El Salvador (sv/es) monument
        # row.  NOTE(review): setter order is assumed significant.
        self.set_monuments_all_id("id")
        self.set_changed()
        self.set_wlm_source()
        self.set_coords()
        self.set_is()
        self.set_country()
        # NOTE(review): set_coords() is invoked a second time here (already
        # called above) -- looks redundant; confirm and drop one call.
        self.set_coords()
        self.set_heritage()
        self.set_heritage_id()
        self.set_adm_location()
        self.set_directions()
        self.set_image()
        self.set_commonscat()
        self.update_descriptions()
        self.update_labels()
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Point of entrance for importer."""
    args = importer.handle_args()
    dataset = Dataset("sv", "es", SvEs)
    dataset.lookup_downloads = {"heritage_type": "sv_(es)/tipo"}
    dataset.data_files = {
        "departments": "salvador_departments.json",
        "municipalities": "salvador_municipalities.json"
    }
    importer.main(args, dataset)
# NOTE(review): chunk starts mid-method -- this ``return`` is the tail of an
# ``exists_with_monument_article`` override whose header is outside this view;
# it hard-codes "de" / "artikel" for the base-class lookup.
        return super().exists_with_monument_article("de", "artikel")

    def __init__(self, db_row_dict, mapping, data_files, existing, repository):
        """Build the Wikidata statements for one Austrian (at/de) row.

        NOTE(review): setter order is assumed significant -- verify before
        reordering.
        """
        Monument.__init__(self, db_row_dict, mapping, data_files, existing,
                          repository)
        self.set_monuments_all_id()
        self.set_changed()
        # NOTE(review): unlike sibling importers, wlm_source is assigned
        # directly instead of via a set_wlm_source() helper.
        self.wlm_source = self.create_wlm_source(self.monuments_all_id)
        self.update_labels()
        self.set_descriptions()
        self.set_is()
        self.set_type()
        self.set_country()
        self.set_image("foto")  # "foto" is the image column in this table
        self.set_heritage()
        self.set_heritage_id()
        self.set_adm_location()
        self.set_street_address()
        self.set_coords(("lat", "lon"))
        self.set_commonscat()
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Point of entrance for importer."""
    args = importer.handle_args()
    dataset = Dataset("at", "de", AtDe)
    dataset.data_files = {"municipalities": "austria_municipalities.json"}
    dataset.lookup_downloads = {"types": "at_(de)/types"}
    importer.main(args, dataset)
# NOTE(review): chunk starts mid-method -- the ``def __init__(...)`` header
# and any earlier setter calls are outside this view.
        self.set_source()
        self.set_registrant_url()
        self.set_heritage_id()
        self.set_heritage()
        self.set_country()
        self.set_image()
        self.set_adm_location()
        self.set_commonscat()
        self.set_architect()
        self.set_style()
        # self.set_is()
        # NOTE(review): set_is() above is disabled and set_special_is() is
        # used instead -- presumably intentional; confirm with upstream.
        self.set_special_is()
        self.set_coords()
        self.set_building_year()
        self.set_address_and_disambig()
        self.update_labels()
        self.update_descriptions()
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Point of entrance for importer."""
    args = importer.handle_args()
    dataset = Dataset("be-bru", "nl", BeBruNl)
    dataset.data_files = {"municipalities": "belgium_municipalities.json"}
    dataset.lookup_downloads = {
        "type": "be-bru (nl)/objtype",
        "style": "be-bru (nl)/bouwstijl"
    }
    importer.main(args, dataset)
# NOTE(review): chunk starts mid-method -- the ``def __init__(...)`` header
# is outside this view.
        # Marker label (Georgian, presumably "national") and its Wikidata
        # item, used for nationally-important monuments -- confirm semantics
        # against the set_heritage() implementation.
        self.NATIONAL_IMPORTANCE_STR = "ეროვნული"
        self.NATIONAL_IMPORTANCE_Q = "Q34480057"
        # Build the Wikidata statements for one Georgian (ge/ka) monument
        # row.  NOTE(review): setter order is assumed significant.
        self.set_monuments_all_id("id")
        self.set_changed()
        self.set_wlm_source()
        self.set_heritage_id()
        self.set_heritage()
        self.set_country()
        self.set_coords()
        self.set_adm_location()
        self.set_address()
        self.set_is()
        self.set_image()
        self.set_commonscat()
        self.set_inception()
        self.update_labels()
        self.update_descriptions()
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Command line entry point for importer."""
    args = importer.handle_args()
    dataset = Dataset("ge", "ka", GeKa)
    dataset.data_files = {
        "admin": "georgia_admin.json",
        "settlements": "georgia_settlements.json"
    }
    dataset.lookup_downloads = {"types": "ge (ka)/types"}
    importer.main(args, dataset)
# NOTE(review): chunk starts mid-method -- the ``def __init__(...)`` header
# is outside this view.
        Monument.__init__(self, db_row_dict, mapping, data_files, existing,
                          repository)
        # Build the Wikidata statements for one Albanian (al/sq) monument
        # row.  NOTE(review): setter order is assumed significant.
        self.set_monuments_all_id("idno")
        self.set_changed()
        self.set_wlm_source()
        self.set_country()
        self.set_heritage_id()
        self.set_heritage()
        self.set_is()
        self.set_special_is()
        self.set_adm_location()
        self.set_location()
        self.set_coords()
        self.set_image()
        self.update_descriptions()
        self.update_labels()
        # there's no commonscat in dataset
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Point of entrance for importer."""
    args = importer.handle_args()
    dataset = Dataset("al", "sq", AlSq)
    dataset.data_files = {
        "settlements": "albania_settlements.json",
        "municipalities": "albania_municipalities.json"
    }
    dataset.lookup_downloads = {"is": "al_(sq)/type"}
    importer.main(args, dataset)
# NOTE(review): chunk starts mid-method -- this ``try`` block is the tail of
# a location-setting method whose header (and the origin of ``socken_dict``)
# is outside this view.
        try:
            # Collect candidate items whose label starts with the socken
            # (parish) name; anything but exactly one hit counts as a
            # failed lookup.
            possible_socken = [
                x["item"]
                for x in socken_dict
                if x["itemLabel"].startswith(self.socken)
            ]
            if len(possible_socken) != 1:
                raise ValueError
            self.add_statement("location", possible_socken[0])
        except (IndexError, ValueError):
            # Unresolved: report the raw "socken (landskap)" pair instead of
            # adding a statement.
            raw_socken = "{} ({})".format(self.socken, self.landskap)
            self.add_to_report("socken", raw_socken)

    def set_monuments_all_id(self):
        """Map which column name in specific table to ID in monuments_all."""
        self.monuments_all_id = self.id

    def exists_with_monument_article(self, language):
        # NOTE(review): the ``language`` parameter is ignored; "sv" and
        # "artikel" are hard-coded for the base-class lookup -- same pattern
        # as sibling importers, but confirm intent.
        return super().exists_with_monument_article("sv", "artikel")


if __name__ == "__main__":
    """Command line entry point for importer."""
    args = importer.handle_args()
    dataset = Dataset("se-fornmin", "sv", SeFornminSv)
    dataset.data_files = {
        "municipalities": "sweden_municipalities.json",
        "socken": "sweden_socken.json"
    }
    dataset.lookup_downloads = {"types": "se-fornmin_(sv)/types"}
    importer.main(args, dataset)
    def __init__(self, db_row_dict, mapping, data_files, existing, repository):
        """Build the Wikidata statements for one Kosovo (xk/sq) monument row.

        NOTE(review): setter order is assumed significant -- verify before
        reordering.
        """
        Monument.__init__(self, db_row_dict, mapping, data_files, existing,
                          repository)
        self.set_monuments_all_id("idno")
        self.set_changed()
        self.set_wlm_source()
        self.set_heritage_id()
        self.set_coords()
        self.set_country()
        self.set_adm_location()
        self.set_location()
        self.set_is()
        self.set_heritage()
        self.set_inception()
        self.update_labels()
        self.update_descriptions()
        # Attach an existing Wikidata item when one matches.
        self.set_wd_item(self.find_matching_wikidata(mapping))


if __name__ == "__main__":
    """Point of entrance for importer."""
    args = importer.handle_args()
    dataset = Dataset("xk", "sq", XkSq)
    dataset.data_files = {"settlements": "kosovo_settlements.json"}
    dataset.lookup_downloads = {
        "municipalities": "xk (sq)/municipalities",
        "categories": "xk (sq)/category"
    }
    importer.main(args, dataset)