def update_tourismus():
    """Download the current per-state (Bundesland) measures from
    tourismus-wegweiser.de and store them via makeMeasure().

    Side effects:
      - Hard-deletes previously auto-linked regionHasGroup rows whose
        source is the tourismus-wegweiser JSON feed before re-importing.
      - Flushes (but does not commit) the db session.

    On a non-200 response, prints an error and the response object.
    """
    print("Lade aktuelle Maßnahmen der Länder herunter.")
    openURL = urllib.request.urlopen('https://tourismus-wegweiser.de/json')
    if (openURL.getcode() == 200):
        data = json.loads(openURL.read())
        # Delete the old auto-created links for this source so the import
        # below starts from a clean slate.
        source_text = "https://tourismus-wegweiser.de/json"
        so = sources.query.filter(sources.text == source_text).first()
        if so:
            # Alternative (soft delete) kept for reference:
            #db.session.execute(regionHasGroup.__table__.update().where(and_(regionHasGroup.autolinked == True, regionHasGroup.source_id == so.id)).values(is_deleted=True))
            db.session.execute(regionHasGroup.__table__.delete().where(
                and_(regionHasGroup.autolinked == True,
                     regionHasGroup.source_id == so.id)))
            db.session.flush()
        for d in data:
            # One feed entry per federal state; create the region on demand.
            region_id = createRegionIfNotExists(d["Bundesland"]).id
            for title in d["allgemein"]:
                mkdown = markdownify(d["allgemein"][title]["text"])
                makeMeasure(mkdown,
                            region_id=region_id,
                            title=title,
                            source="https://tourismus-wegweiser.de/json")
            for m in d["tourismus"]:
                for title in m:
                    mkdown = ""
                    # Build one markdown document from the fixed sections.
                    for index in [
                            "Öffnung und Zugang", "Aufenthalt und Hygiene"
                    ]:
                        if m[title][index]["text"] != "":
                            mkdown += "# " + index + "\n"
                            mkdown += markdownify(m[title][index]["text"])
                            #TODO: use configuration instead for option to translate titles
                    if m[title]["Weitere Informationen"] != "":
                        mkdown += "# Weitere Informationen\n"
                        #TODO: use configuration instead for option to translate titles
                        if not isinstance(m[title]["Weitere Informationen"],
                                          str):
                            # Feed sometimes delivers non-string content here;
                            # log it for inspection before markdownify runs.
                            print(region_id, title,
                                  m[title]["Weitere Informationen"])
                        mkdown += markdownify(
                            m[title]["Weitere Informationen"])
                    makeMeasure(mkdown,
                                region_id=region_id,
                                title=title,
                                source="https://tourismus-wegweiser.de/json")
    else:
        print("ERROR loading Maßnahmen")
        print(openURL)
    # NOTE(review): flush placement reconstructed from a collapsed line —
    # presumably at function end (runs on both branches); confirm.
    db.session.flush()
def initialize(overwriteLinks=False): print("Lade Landkreise herunter. Das kann etwas dauern.") #openURL = urllib.request.urlopen('https://services7.arcgis.com/mOBPykOjAyBO2ZKk/arcgis/rest/services/RKI_Landkreisdaten/FeatureServer/0/query?where=1%3D1&outFields=GEN,Shape__Length,cases7_per_100k,RS,BL,BEZ&outSR=4326&f=json') openURL = urllib.request.urlopen( 'https://services7.arcgis.com/mOBPykOjAyBO2ZKk/arcgis/rest/services/RKI_Landkreisdaten/FeatureServer/0/query?where=1%3D1&outFields=*&outSR=4326&f=json' ) if (openURL.getcode() == 200): data = json.loads(openURL.read()) neueKreise = [] for kreis in data["features"]: coordinates = [] for ring in kreis["geometry"]["rings"]: a = vw.simplify(ring, ratio=0.1) for pair in a: tmp = pair[0] pair[0] = pair[1] pair[1] = tmp coordinates.append(a) if not districts.query.get(int(kreis["attributes"]["RS"])): dnew = districts( int(kreis["attributes"]["RS"]), kreis["attributes"]["GEN"], int(kreis["attributes"]["cases7_per_100k"]), coloring(int(kreis["attributes"]["cases7_per_100k"])), coordinates, kreis["attributes"]["BEZ"]) dnew.region = createRegionIfNotExists( kreis["attributes"]["BL"]) db.session.add(dnew) db.session.flush() neueKreise.append(int(kreis["attributes"]["RS"])) # Links zu den Kreisen erstellen mainLinks(neueKreise, overwrite=overwriteLinks) # Default - Categorien erstellen getOrmakeCategory("Kontaktbestimmungen", is_OFP=True, weight=-300, force=True) getOrmakeCategory("Geschäfte", is_OFP=True, weight=-200, force=True) #getOrmakeCategory("Private Feiern", is_OFP = True, weight =-100, force=True) getOrmakeCategory("Bußgelder", is_OFP=True, weight=200, force=True) getOrmakeCategory("Impf-Informationen", is_OFP=True, weight=300, force=True) db.session.flush() else: print("ERROR loading Landkreise")
def initialize():
    """Download RKI district records, create missing district rows, attach
    links from 'links.json', create a default CMS user, and commit.

    NOTE(review): this module appears to define initialize() twice; the
    later definition in the file shadows the earlier one.
    """
    print("Lade Landkreise herunter. Das kann etwas dauern.")
    openURL = urllib.request.urlopen(
        'https://services7.arcgis.com/mOBPykOjAyBO2ZKk/arcgis/rest/services/RKI_Landkreisdaten/FeatureServer/0/query?where=1%3D1&outFields=GEN,Shape__Length,cases7_per_100k,RS,BL,BEZ&outSR=4326&f=json'
    )
    if (openURL.getcode() == 200):
        data = json.loads(openURL.read())
        neueKreise = []
        for kreis in data["features"]:
            coordinates = []
            for ring in kreis["geometry"]["rings"]:
                # Simplify each polygon ring to ~10% of its points.
                a = vw.simplify(ring, ratio=0.1)
                # Swap each coordinate pair in place — presumably
                # (lon, lat) -> (lat, lon); confirm against the consumer.
                for pair in a:
                    tmp = pair[0]
                    pair[0] = pair[1]
                    pair[1] = tmp
                coordinates.append(a)
            # Only create districts that do not exist yet (keyed by RS).
            if not districts.query.get(int(kreis["attributes"]["RS"])):
                # NOTE(review): this overload passes an extra [] positional
                # argument compared to the other initialize(); confirm the
                # districts(...) constructor signature.
                dnew = districts(
                    int(kreis["attributes"]["RS"]), kreis["attributes"]["GEN"],
                    int(kreis["attributes"]["cases7_per_100k"]),
                    coloring(int(kreis["attributes"]["cases7_per_100k"])), [],
                    coordinates, kreis["attributes"]["BEZ"])
                dnew.region = createRegionIfNotExists(
                    kreis["attributes"]["BL"])
                db.session.add(dnew)
                db.session.flush()
                neueKreise.append(int(kreis["attributes"]["RS"]))
        # Attach a homepage link to each newly created district.
        with open('links.json') as f:
            data = json.load(f)
            for d in data:
                if d["id"] in neueKreise:
                    dist = districts.query.get(d["id"])
                    dist.links = [{
                        "href": d["link"],
                        "title": "Zur Webseite des Kreises"
                    }]
        # Hard-coded plaintext credentials — see existing TODO.
        if not users.query.filter(users.username == "cms").first():
            #TODO: REMOVE THIS FOR PRODUCTION!!!! (and add hashing)
            u = users("cms", "pw")
            db.session.add(u)
        # NOTE(review): commit placement reconstructed from a collapsed
        # line — assumed to run whenever the download succeeded; confirm.
        db.session.commit()
    else:
        print("ERROR loading Landkreise")
# Ordered (prefix, suffix, category) patterns for classifying the caption of
# a district link. Order matters: e.g. "Stadtkreis " must be tested before
# "Stadt ", and the category-specific forms before the bare-name fallback.
_LINK_PATTERNS = [
    ("Landkreis ", None, "Landkreis"),
    (None, " Landkreis", "Landkreis"),
    ("Stadtkreis ", None, "Stadtkreis"),
    ("Kreisfreie Stadt ", None, "Kreisfreie Stadt"),
    (None, " kreisfreie Stadt", "Kreisfreie Stadt"),
    ("Stadt ", None, "Kreisfreie Stadt"),
    ("Kreis ", None, "Landkreis"),
]


def _classify_link_text(text):
    """Split a link caption like "Landkreis Foo" into (category, name).

    Returns (category, name) for a recognized prefix/suffix pattern,
    (None, text) for a plain caption without a category marker, and
    (None, None) for captions containing "RKI" (those are skipped).

    Slicing is derived from len() of the matched pattern; the previous
    hand-written slice indices dropped one character too many for the
    " Landkreis" (used [:-11] for a 10-char suffix) and
    " kreisfreie Stadt" (used [:-18] for a 17-char suffix) cases,
    truncating the last letter of the district name.
    """
    for prefix, suffix, category in _LINK_PATTERNS:
        if prefix is not None and text.startswith(prefix):
            return category, text[len(prefix):]
        if suffix is not None and text.endswith(suffix):
            return category, text[:-len(suffix)]
    if "RKI" not in text:
        return None, text
    return None, None


def part1():
    """Resolve district links scraped from 'landkreise.json' against the
    districts table and write the result to 'districtlinks.json'.

    Output format: {"ok": [...], "err": [...]} — each entry carries
    id/link/comment keys; unmatched or ambiguous names land in "err"
    with id=None.
    """
    with open('landkreise.json') as f:
        data = json.load(f)
    result = {"ok": [], "err": []}
    for entry in data:
        region_id = createRegionIfNotExists(entry["Bundesland"]).id
        print(region_id)
        html_soup = bs4.BeautifulSoup(entry["Regionale Einschränkungen"],
                                      'html.parser')
        for l in html_soup.findAll('a'):
            category, name = _classify_link_text(l.text)
            if name is None:
                continue
            try:
                if category == "Landkreis":
                    # "Landkreis" rows may be stored under either
                    # "Landkreis" or "Kreis".
                    dist = districts.query.filter(
                        districts.name.like("%{}%".format(name)),
                        districts.region_id == region_id,
                        or_(districts.category == "Landkreis",
                            districts.category == "Kreis")).one()
                elif category is not None:
                    dist = districts.query.filter(
                        districts.name.like("%{}%".format(name)),
                        districts.region_id == region_id,
                        districts.category == category).one()
                else:
                    # No category marker: match by name and region only.
                    dist = districts.query.filter(
                        districts.name.like("%{}%".format(name)),
                        districts.region_id == region_id).one()
                result["ok"].append({
                    "id": dist.id,
                    "link": l["href"],
                    "comment": l.text
                })
            except (NoResultFound, MultipleResultsFound):
                # Either zero or several districts matched — record for
                # manual resolution (both branches were identical before).
                result["err"].append({
                    "id": None,
                    "link": l["href"],
                    "comment": l.text
                })
    with open('districtlinks.json', 'w') as json_file:
        json.dump(result, json_file)
def _links_to_markdown(html, bullet=False):
    """Convert tourismus-wegweiser HTML (text interleaved with <a> tags)
    to markdown, one output line per top-level element.

    bullet -- when True, each non-empty line is emitted as a markdown
    list item ("* ..."), matching the formatting used for the
    "allgemein" sections.

    Only <a> tags are converted; for any other tag a warning is printed.
    (The original inline code constructed Exception("Unhandeled tag")
    without raising it, i.e. a silent no-op — this keeps the best-effort
    behaviour but makes the problem visible.)
    """
    html_soup = bs4.BeautifulSoup(html, 'html.parser')
    mkdown = ""
    for c in html_soup.contents:
        mk = ""
        for l in c:
            if isinstance(l, bs4.element.Tag):
                if l.name == "a":
                    mk += "[" + l.string + "](" + l["href"] + ")"
                else:
                    print("WARNING: unhandled tag:", l.name)
            elif isinstance(l, bs4.element.NavigableString):
                mk += str(l)
        if mk != "":
            if bullet:
                mkdown += "* " + mk + "\n"
            else:
                mkdown += mk + "\n"
    return mkdown


def update():
    """Refresh district incidence values from the RKI feed, then re-import
    the per-state measures from tourismus-wegweiser.de and commit.

    Previously auto-linked regionHasGroup rows for this source are
    soft-deleted (is_deleted=True) before the re-import.
    """
    print("Lade Aktuelle Daten des RKI herunter.")
    openURL = urllib.request.urlopen(
        'https://services7.arcgis.com/mOBPykOjAyBO2ZKk/arcgis/rest/services/RKI_Landkreisdaten/FeatureServer/0/query?where=1%3D1&outFields=cases7_per_100k,RS&returnGeometry=false&outSR=4326&f=json'
    )
    if (openURL.getcode() == 200):
        data = json.loads(openURL.read())
        for kreis in data["features"]:
            updateDistrictIncidence(
                int(kreis["attributes"]["RS"]),
                int(kreis["attributes"]["cases7_per_100k"]),
                coloring(int(kreis["attributes"]["cases7_per_100k"])))
    else:
        print("ERROR loading Landkreise")
        print(openURL)
    print("Lade aktuelle Maßnahmen der Länder herunter.")
    openURL = urllib.request.urlopen('https://tourismus-wegweiser.de/json')
    if (openURL.getcode() == 200):
        data = json.loads(openURL.read())
        # Soft-delete the previously auto-created links for this source.
        source_text = "https://tourismus-wegweiser.de/json"
        so = sources.query.filter(sources.text == source_text).first()
        if so:
            db.session.execute(regionHasGroup.__table__.update().where(
                and_(regionHasGroup.autolinked == True,
                     regionHasGroup.source_id == so.id)).values(
                         is_deleted=True
                     ))  #TODO: mark as deleted instead of actually deleting it
            db.session.flush()
        for d in data:
            # One feed entry per federal state; create the region on demand.
            region_id = createRegionIfNotExists(d["Bundesland"]).id
            for title in d["allgemein"]:
                mkdown = _links_to_markdown(d["allgemein"][title]["text"],
                                            bullet=True)
                makeMeasure(mkdown, region_id, title=title)
            for m in d["tourismus"]:
                for title in m:
                    mkdown = ""
                    for index in [
                            "Öffnung und Zugang", "Aufenthalt und Hygiene"
                    ]:
                        if m[title][index]["text"] != "":
                            mkdown += "# " + index + "\n"
                            #TODO: use configuration instead for option to translate titles
                            mkdown += _links_to_markdown(
                                m[title][index]["text"])
                    if m[title]["Weitere Informationen"] != "":
                        mkdown += "# Weitere Informationen\n"
                        #TODO: use configuration instead for option to translate titles
                        # BUG FIX: this used to re-parse
                        # m[title][index]["text"] (the last loop section)
                        # instead of the "Weitere Informationen" content.
                        mkdown += _links_to_markdown(
                            m[title]["Weitere Informationen"])
                    # BUG FIX: the result was previously assigned to `m`,
                    # clobbering the loop variable still indexed as
                    # m[title] on subsequent iterations.
                    makeMeasure(mkdown, region_id, title=title)
        db.session.commit()
    else:
        print("ERROR loading Maßnahmen")
        print(openURL)