def handle(self, *args, **options):
    """Import a city's RIS data from a json dump produced by the exporter.

    Loads and validates the json file, creates the ``Body`` if it does not
    exist yet (resolving the AGS from the options, the dump, or a lookup),
    then imports the data, downloads files, and notifies subscribed users.

    Raises:
        CommandError: if the dump's format version does not match ours.
        RuntimeError: if no Amtliche Gemeindeschlüssel could be determined.
    """
    input_file: Path = options["input"]

    logger.info("Loading the data")
    with input_file.open() as fp:
        json_data = json.load(fp)
    # Refuse dumps written by an incompatible exporter version
    if json_data["format_version"] != format_version:
        raise CommandError(
            f"This version of {settings.PRODUCT_NAME} can only import json format version {format_version}, "
            f"but the json file you provided is version {json_data['format_version']}"
        )
    ris_data: RisData = converter.structure(json_data, RisData)

    body = models.Body.objects.filter(name=ris_data.meta.name).first()
    if not body:
        logger.info("Building the body")

        # Precedence: explicit --ags option, then the dump's metadata,
        # then a lookup by city name.
        if options["ags"] or ris_data.meta.ags:
            ags = options["ags"] or ris_data.meta.ags
        else:
            ags = city_to_ags(ris_data.meta.name, False)
        if not ags:
            raise RuntimeError(
                f"Failed to determine the Amtliche Gemeindeschlüssel for '{ris_data.meta.name}'. "
                f"Please look it up yourself and specify it with `--ags`"
            )
        logger.info(f"The Amtliche Gemeindeschlüssel is {ags}")
        body = models.Body(
            name=ris_data.meta.name, short_name=ris_data.meta.name, ags=ags
        )
        body.save()
        if not options["skip_body_extra"]:
            import_outline(body)
            import_streets(body)
    else:
        # Fixed: was `logging.info(...)`, which logs through the root logger
        # and bypasses this module's logger configuration.
        logger.info("Using existing body")

    # TODO: Re-enable this after some more thorough testing
    # handle_counts(ris_data, options["allow_shrinkage"])

    import_data(body, ris_data)

    fix_sort_date(datetime.datetime.now(tz=tz.tzlocal()))

    if not options["skip_download"]:
        Importer(BaseLoader(dict()), force_singlethread=True).load_files(
            fallback_city=body.short_name
        )

    if not options["no_notify_users"]:
        logger.info("Sending notifications")
        NotifyUsers().notify_all()
def get_ags(self, body: Body, system: JSON, userinput: str) -> Tuple[str, str]:
    """Determine the Amtliche Gemeindeschlüssel (AGS) for a body.

    This function tries:
    1. The ags field in the oparl body
    2. Querying wikidata with
        a) the body's short name
        b) the user's input
        c) the body's full name
        d) the system's name
        e) locality in the location

    Returns the ags and the name that did match.

    Raises:
        RuntimeError: if none of the candidate names yielded an AGS.
    """
    ags = body.ags
    if ags:
        # A valid AGS has 8 digits; 5 digits is the district-level prefix.
        if len(ags) in (5, 8):
            return ags, body.short_name
        # Fixed typo in the log message ("legth" -> "length"); use lazy
        # %-style arguments so formatting only happens if the record is emitted.
        logger.error("Ignoring ags '%s' with invalid length %d", ags, len(ags))

    district = bool(re.match(settings.DISTRICT_REGEX, body.name, re.IGNORECASE))

    # Candidate (source-description, name) pairs, in order of preference.
    to_check = [
        ("body short name", body.short_name),
        ("user input", userinput),
        ("body name", body.name),
    ]
    if system.get("name"):
        short_system_name = self.utils.normalize_body_name(system["name"])
        to_check.append(("system name", short_system_name))
    if body.center and body.center.locality:
        locality = body.center.locality
        to_check.append(("body location locality", locality))

    for source, value in to_check:
        ags = city_to_ags(value, district)
        if ags:
            logger.debug("Found ags using the %s: '%s'", source, value)
            return ags, value

    raise RuntimeError(
        "Could not determine the Amtliche Gemeindeschlüssel using {}".format(to_check)
    )
def handle(self, *args, **options):
    """Import a city's RIS data from a json dump, replacing existing data.

    Loads and validates the json file, creates the ``Body`` if needed, then
    flushes and re-imports each model in dependency order (papers and files
    before their m2m link table; meetings before consultations; consultations
    before agenda items), rebuilds the search index, and downloads files.

    Raises:
        CommandError: if the dump's format version does not match ours.
        RuntimeError: if no Amtliche Gemeindeschlüssel could be determined.
    """
    input_file: Path = options["input"]

    logger.info("Loading the data")
    with input_file.open() as fp:
        json_data = json.load(fp)
    # Refuse dumps written by an incompatible exporter version
    if json_data["format_version"] != format_version:
        raise CommandError(
            f"This version of {settings.PRODUCT_NAME} can only import json format version {format_version}, "
            f"but the json file you provided is version {json_data['format_version']}"
        )
    ris_data: RisData = converter.structure(json_data, RisData)

    body = models.Body.objects.filter(name=ris_data.meta.name).first()
    if not body:
        logger.info("Building the body")

        # Precedence: explicit --ags option, then the dump's metadata,
        # then a lookup by city name.
        if options["ags"] or ris_data.meta.ags:
            ags = options["ags"] or ris_data.meta.ags
        else:
            ags = city_to_ags(ris_data.meta.name, False)
        if not ags:
            raise RuntimeError(
                f"Failed to determine the Amtliche Gemeindeschlüssel for '{ris_data.meta.name}'. "
                f"Please look it up yourself and specify it with `--ags`"
            )
        logger.info(f"The Amtliche Gemeindeschlüssel is {ags}")
        body = models.Body(
            name=ris_data.meta.name, short_name=ris_data.meta.name, ags=ags
        )
        body.save()
        if not options["skip_body_extra"]:
            import_outline(body)
            import_streets(body)
    else:
        # Fixed: was `logging.info(...)`, which logs through the root logger
        # and bypasses this module's logger configuration.
        logger.info("Using existing body")

    # TODO: Re-enable this after some more thorough testing
    # handle_counts(ris_data, options["allow_shrinkage"])

    # Papers and files must exist before the m2m link table can be rebuilt.
    flush_model(models.Paper)
    self.import_papers(ris_data)
    self.import_files(ris_data)
    paper_id_map = make_id_map(models.Paper.objects)
    file_id_map = make_id_map(models.File.objects)
    flush_model(models.Paper.files.through)
    self.import_paper_files(ris_data, paper_id_map, file_id_map)

    flush_model(models.Organization)
    self.import_organizations(body, ris_data)

    # Meetings reference locations by description, so import locations first.
    self.import_meeting_locations(ris_data)
    locations = dict(models.Location.objects.values_list("description", "id"))
    flush_model(models.Meeting)
    self.import_meetings(ris_data, locations)
    # Only meetings with an oparl id can be mapped back to the dump.
    meeting_id_map = make_id_map(
        models.Meeting.objects.filter(oparl_id__isnull=False)
    )
    organization_name_id_map = dict(
        models.Organization.objects.values_list("name", "id")
    )
    flush_model(models.Meeting.organizations.through)
    self.import_meeting_organization(
        meeting_id_map, organization_name_id_map, ris_data
    )

    flush_model(models.Person)
    self.import_persons(ris_data)

    flush_model(models.Consultation)
    self.import_consultations(ris_data, meeting_id_map, paper_id_map)

    # We don't have original ids for all agenda items (yet?),
    # so we just assume meeting x paper is unique
    consultation_map = {
        (meeting_id, paper_id): consultation_id
        for meeting_id, paper_id, consultation_id in models.Consultation.objects.values_list(
            "meeting_id", "paper_id", "id"
        )
    }

    flush_model(models.AgendaItem)
    self.import_agenda_items(
        ris_data, consultation_map, meeting_id_map, paper_id_map
    )

    flush_model(models.Membership)
    self.import_memberships(ris_data)

    # NOTE(review): `fallback_date` is not defined in this chunk — presumably
    # a module-level constant; confirm it is in scope.
    fix_sort_date(fallback_date, datetime.datetime.now(tz=tz.tzlocal()))

    # With the current bulk indexing we need to do this manually
    call_command("search_index", action="populate")

    if not options["skip_download"]:
        Importer(BaseLoader(dict()), force_singlethread=True).load_files(
            fallback_city=body.short_name
        )