def handle(self, *args, **options):
    """Import streets for a body, preferring an explicitly passed AGS.

    Raises CommandError when neither --ags nor the body provides an
    Amtliche Gemeindeschlüssel.
    """
    target_body = Body.objects.get(id=options["body-id"])
    # An explicit --ags wins; otherwise fall back to the key stored on the body.
    municipality_key = options["ags"] or target_body.ags
    if municipality_key:
        import_streets(target_body, municipality_key)
    else:
        raise CommandError(
            "The body doesn't have an associated amtliche Gemeindeschlüssel, please provide one with --ags"
        )
def import_body_and_metadata(
    self,
    body_id: str,
    importer: Importer,
    userinput: str,
    ags: Optional[str],
    skip_body_extra: bool = False,
) -> Tuple[JSON, str]:
    """Fetch and import an OParl body, attach its AGS, optionally import geo data.

    Returns a tuple of the raw body JSON and the dotenv lines the operator
    should add to their configuration.
    """
    logger.info(f"Fetching the body {body_id}")
    [body_data] = importer.load_bodies(body_id)
    logger.info("Importing the body")
    [body] = importer.import_bodies()
    importer.converter.default_body = body

    logger.info("Looking up the Amtliche Gemeindeschlüssel")
    if not ags:
        # No key supplied: resolve it (plus a canonical name) from the RIS system.
        ags, match_name = self.get_ags(body, importer.loader.system, userinput)
        body.ags = ags
        # Sometimes there's a bad short name (e.g. "Rat" for Erkelenz),
        # so we use the name that's in wikidata instead
        body.short_name = match_name
    else:
        # A valid key has 5 (municipality) or 8 (full) digits; warn but accept others.
        if len(ags) not in (5, 8):
            logger.warning(
                "Your Amtlicher Gemeindeschlüssel has {} digits instead of 5 or 8".format(
                    len(ags)
                )
            )
        body.ags = ags
    body.save()
    logger.info(
        "Using {} as Amtliche Gemeindeschlüssel for '{}'".format(
            body.ags, body.short_name
        )
    )

    dotenv = ""
    if body.id != settings.SITE_DEFAULT_BODY:
        dotenv += f"SITE_DEFAULT_BODY={body.id}\n"
    if dotenv:
        logger.info(
            "Found the oparl endpoint. Please add the following line to your dotenv file "
            "(you'll be reminded again after the import finished): \n\n" + dotenv
        )

    if skip_body_extra:
        logger.info(
            f"Body {body.short_name} import successful. "
            f"Don't forget to run import_streets, import_amenities and import_outline"
        )
    else:
        logger.info("Importing the shape of the city")
        import_outline(body)
        logger.info("Importing the streets")
        import_streets(body)
        logger.info(f"Body {body.short_name} import with geo data successful.")
    return body_data.data, dotenv
def handle(self, *args, **options):
    """Import a RIS json dump: build the body if needed, then import the data.

    Raises CommandError when the dump's format version doesn't match ours and
    RuntimeError when no AGS can be determined for a newly created body.

    Fix: the "Using existing body" message went through `logging.info` (the
    root logger) instead of the module-level `logger` used everywhere else.
    """
    input_file: Path = options["input"]

    logger.info("Loading the data")
    with input_file.open() as fp:
        json_data = json.load(fp)
    if json_data["format_version"] != format_version:
        raise CommandError(
            f"This version of {settings.PRODUCT_NAME} can only import json format version {format_version}, "
            f"but the json file you provided is version {json_data['format_version']}"
        )
    ris_data: RisData = converter.structure(json_data, RisData)

    body = models.Body.objects.filter(name=ris_data.meta.name).first()
    if not body:
        logger.info("Building the body")
        # Precedence: explicit --ags, then the dump's own AGS, then a name lookup.
        if options["ags"] or ris_data.meta.ags:
            ags = options["ags"] or ris_data.meta.ags
        else:
            ags = city_to_ags(ris_data.meta.name, False)
            if not ags:
                raise RuntimeError(
                    f"Failed to determine the Amtliche Gemeindeschlüssel for '{ris_data.meta.name}'. "
                    f"Please look it up yourself and specify it with `--ags`"
                )
            logger.info(f"The Amtliche Gemeindeschlüssel is {ags}")
        body = models.Body(
            name=ris_data.meta.name, short_name=ris_data.meta.name, ags=ags
        )
        body.save()
        if not options["skip_body_extra"]:
            import_outline(body)
            import_streets(body)
    else:
        # Was `logging.info(...)`: use the module logger for consistent output.
        logger.info("Using existing body")

    # TODO: Re-enable this after some more thorough testing
    # handle_counts(ris_data, options["allow_shrinkage"])

    import_data(body, ris_data)
    fix_sort_date(datetime.datetime.now(tz=tz.tzlocal()))

    if not options["skip_download"]:
        Importer(BaseLoader(dict()), force_singlethread=True).load_files(
            fallback_city=body.short_name
        )

    if not options["no_notify_users"]:
        logger.info("Sending notifications")
        NotifyUsers().notify_all()
def test_import_streets(self):
    """Importing streets for the tiny fixture city yields exactly nine streets."""
    fixture_body = Body.objects.get(id=1)
    import_streets(fixture_body, self.ags_tiny_city_called_bruecktal)
    street_count = SearchStreet.objects.count()
    self.assertEqual(street_count, 9)
def handle(self, *args, **options):
    """Import a RIS json dump, flushing and rebuilding each model in order.

    Builds the body first when it doesn't exist yet, then re-imports papers,
    files, organizations, meetings, persons, consultations, agenda items and
    memberships, repopulates the search index and downloads the referenced
    files unless --skip-download was given.

    Raises CommandError when the dump's format version doesn't match ours and
    RuntimeError when no AGS can be determined for a newly created body.

    Fix: the "Using existing body" message went through `logging.info` (the
    root logger) instead of the module-level `logger` used everywhere else.
    """
    input_file: Path = options["input"]

    logger.info("Loading the data")
    with input_file.open() as fp:
        json_data = json.load(fp)
    if json_data["format_version"] != format_version:
        raise CommandError(
            f"This version of {settings.PRODUCT_NAME} can only import json format version {format_version}, "
            f"but the json file you provided is version {json_data['format_version']}"
        )
    ris_data: RisData = converter.structure(json_data, RisData)

    body = models.Body.objects.filter(name=ris_data.meta.name).first()
    if not body:
        logger.info("Building the body")
        # Precedence: explicit --ags, then the dump's own AGS, then a name lookup.
        if options["ags"] or ris_data.meta.ags:
            ags = options["ags"] or ris_data.meta.ags
        else:
            ags = city_to_ags(ris_data.meta.name, False)
            if not ags:
                raise RuntimeError(
                    f"Failed to determine the Amtliche Gemeindeschlüssel for '{ris_data.meta.name}'. "
                    f"Please look it up yourself and specify it with `--ags`"
                )
            logger.info(f"The Amtliche Gemeindeschlüssel is {ags}")
        body = models.Body(
            name=ris_data.meta.name, short_name=ris_data.meta.name, ags=ags
        )
        body.save()
        if not options["skip_body_extra"]:
            import_outline(body)
            import_streets(body)
    else:
        # Was `logging.info(...)`: use the module logger for consistent output.
        logger.info("Using existing body")

    # TODO: Reenable this after some more thorough testing
    # handle_counts(ris_data, options["allow_shrinkage"])

    # Papers and files come first; later models reference them through id maps.
    flush_model(models.Paper)
    self.import_papers(ris_data)
    self.import_files(ris_data)
    paper_id_map = make_id_map(models.Paper.objects)
    file_id_map = make_id_map(models.File.objects)
    flush_model(models.Paper.files.through)
    self.import_paper_files(ris_data, paper_id_map, file_id_map)

    flush_model(models.Organization)
    self.import_organizations(body, ris_data)
    self.import_meeting_locations(ris_data)
    locations = dict(models.Location.objects.values_list("description", "id"))

    flush_model(models.Meeting)
    self.import_meetings(ris_data, locations)
    meeting_id_map = make_id_map(
        models.Meeting.objects.filter(oparl_id__isnull=False)
    )
    organization_name_id_map = dict(
        models.Organization.objects.values_list("name", "id")
    )
    flush_model(models.Meeting.organizations.through)
    self.import_meeting_organization(
        meeting_id_map, organization_name_id_map, ris_data
    )

    flush_model(models.Person)
    self.import_persons(ris_data)
    flush_model(models.Consultation)
    self.import_consultations(ris_data, meeting_id_map, paper_id_map)

    # We don't have original ids for all agenda items (yet?),
    # so we just assume meeting x paper is unique
    consultation_map = {
        (a, b): c
        for a, b, c in models.Consultation.objects.values_list(
            "meeting_id", "paper_id", "id"
        )
    }
    flush_model(models.AgendaItem)
    self.import_agenda_items(
        ris_data, consultation_map, meeting_id_map, paper_id_map
    )
    flush_model(models.Membership)
    self.import_memberships(ris_data)
    # NOTE(review): `fallback_date` is not defined in this view of the file —
    # presumably a module-level constant; confirm it exists at import time.
    fix_sort_date(fallback_date, datetime.datetime.now(tz=tz.tzlocal()))

    # With the current bulk indexing we need to do this manually
    call_command("search_index", action="populate")

    if not options["skip_download"]:
        Importer(BaseLoader(dict()), force_singlethread=True).load_files(
            fallback_city=body.short_name
        )
def handle(self, *args, **options):
    """Import streets for the given body.

    Fix: unlike the sibling import_streets command, this version passed
    options["ags"] straight through, handing None to import_streets when no
    --ags was given. Now it falls back to the body's stored AGS and fails
    with a clear CommandError when neither is available.
    """
    body = Body.objects.get(id=options["body-id"])
    ags = options["ags"] or body.ags
    if not ags:
        raise CommandError(
            "The body doesn't have an associated amtliche Gemeindeschlüssel, please provide one with --ags"
        )
    import_streets(body, ags)