Code Example #1
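All of the snippets below are Source classes from waste-collection calendar scrapers; each omits its module-level setup. As a rough sketch (the package paths and the per-source constants such as ICON_MAP, HEADERS, URL, API_URL and _LOGGER are assumptions and vary between examples), a typical source file would start roughly like this:

import datetime
import json
import logging
import re

import requests
from bs4 import BeautifulSoup  # only needed by the examples that scrape HTML

# assumed package layout of the surrounding integration
from waste_collection_schedule import Collection
from waste_collection_schedule.service.ICS import ICS

_LOGGER = logging.getLogger(__name__)  # used by the examples that log instead of raising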
class Source:
    def __init__(self, asId, hnId):
        self._asId = asId
        self._hnId = hnId
        self._ics = ICS(offset=1, regex="Erinnerung: Abfuhr (.*) morgen")

    def fetch(self):
        args = {
            "asId": self._asId,
            "hnId": self._hnId,
            "adresse": "MeineAdresse"
        }

        # get ics file
        r = requests.post(
            "https://www.stadtreinigung.hamburg/privatkunden/abfuhrkalender/Abfuhrtermin.ics",
            data=args,
        )

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
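A minimal, hypothetical usage sketch for this kind of Source; the asId/hnId values below are placeholders, and the Collection attribute names are an assumption rather than something shown in the example:

# placeholder IDs; real values come from the provider's address lookup
source = Source(asId=12345, hnId=67890)
for collection in source.fetch():
    print(collection.date, collection.type)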
Code Example #2
class Source:
    def __init__(self, pois):
        self.ics = ICS()
        self.pois = pois

    def fetch(self):
        r = requests.get(
            url="https://www.kwb-goslar.de/output/options.php",
            params={
                "ModID": "48",
                "call": "ical",
                "pois": self.pois,
            },
            headers={
                "Referer": "https://www.kwb-goslar.de",
            },
        )

        if not r.ok:
            raise Exception(f"Error: failed to fetch url: {r.request.url}")

        dates = self.ics.convert(r.text)

        entries = []
        for d in dates:
            date, waste_type = d
            icon = ICON_MAP.get(waste_type, "mdi:trash-can-outline")
            entries.append(Collection(date=date, t=waste_type, icon=icon))

        return entries
Code Example #3
class Source:
    def __init__(self, city, street):
        self._city = city
        self._street = street
        self._ics = ICS()

    def fetch(self):
        # fetch "Gelber Sack"
        args = {"g": self._city}
        r = requests.get(
            "https://was-wolfsburg.de/subgelberweihgarten/php/abfuhrgelber.php",
            params=args,
        )

        entries = []
        match = re.findall(r"(\d{2})\.(\d{2})\.(\d{4})", r.text)
        for m in match:
            date = datetime.date(day=int(m[0]), month=int(m[1]), year=int(m[2]))
            entries.append(Collection(date, "Gelber Sack"))

        # fetch remaining collections
        args = {"ortabf": self._street}
        r = requests.post(
            "https://was-wolfsburg.de/subabfuhrtermine/ics_abfuhrtermine3.php",
            data=args,
        )
        dates = self._ics.convert(r.text)
        for d in dates:
            entries.append(Collection(d[0], d[1]))

        return entries
Code Example #4
class Source:
    def __init__(self, standort):
        self._standort = standort
        self._ics = ICS()

    def fetch(self):

        now = datetime.datetime.now().date()

        r = requests.get(
            "https://stadtplan.dresden.de/project/cardo3Apps/IDU_DDStadtplan/abfall/ical.ashx",
            params={
                "STANDORT": self._standort,
                "DATUM_VON": now.strftime("%d.%m.%Y"),
                "DATUM_BIS": (now + datetime.timedelta(days=365)).strftime("%d.%m.%Y"),
            },
        )

        dates = self._ics.convert(r.text)

        # example: "Leerung Gelbe Tonne, Bio-Tonne"

        entries = []
        for d in dates:
            if d[1] == "Abfallkalender endet bald":
                continue

            types = d[1].removeprefix("Leerung ")
            for type in types.split(", "):
                entries.append(Collection(d[0], type))
        return entries
Code Example #5
class Source:
    def __init__(self, city, street):
        self._city = city
        self._street = street
        self._ics = ICS()

    def fetch(self):
        # retrieve list of cities
        r = requests.get("https://www.awr.de/api_v2/collection_dates/1/orte")
        cities = json.loads(r.text)

        # create city to id map from retrieved cities
        city_to_id = {
            city["ortsbezeichnung"]: city["ortsnummer"]
            for city in cities["orte"]
        }

        if self._city not in city_to_id:
            _LOGGER.error(f"city not found: {self._city}")
            return []

        cityId = city_to_id[self._city]

        # retrieve list of streets
        r = requests.get(
            f"https://www.awr.de/api_v2/collection_dates/1/ort/{cityId}/strassen"
        )
        streets = json.loads(r.text)

        # create street to id map from retrieved streets
        street_to_id = {
            street["strassenbezeichnung"]: street["strassennummer"]
            for street in streets["strassen"]
        }

        if self._street not in street_to_id:
            _LOGGER.error(f"street not found: {self._street}")
            return []

        streetId = street_to_id[self._street]

        # retrieve list of waste types
        r = requests.get(
            f"https://www.awr.de/api_v2/collection_dates/1/ort/{cityId}/abfallarten"
        )
        waste_types = json.loads(r.text)
        wt = "-".join([t["id"] for t in waste_types["abfallarten"]])

        # get ics file
        r = requests.get(
            f"https://www.awr.de/api_v2/collection_dates/1/ort/{cityId}/strasse/{streetId}/hausnummern/0/abfallarten/{wt}/kalender.ics"
        )

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #6
class Source:
    def __init__(self, city, types, street=None):
        self._city = city
        self._street = street
        self._types = types
        self._ics = ICS()
        self._iconMap = {
            "Restmüll": "mdi:trash-can",
            "Grünabfall": "mdi:leaf",
            "Gelber Sack": "mdi:sack",
            "Papiertonne": "mdi:package-variant",
            "Bildschirm-/Kühlgeräte": "mdi:television-classic",
            "Schadstoffsammlung": "mdi:biohazard",
            "altmetalle": "mdi:nail",
        }

    def fetch(self):
        now = datetime.now()
        entries = self.fetch_year(now.year, self._city, self._street,
                                  self._types)
        if now.month == 12:
            # also get data for next year if we are already in december
            try:
                entries.extend(
                    self.fetch_year((now.year + 1), self._city, self._street,
                                    self._types))
            except Exception:
                # ignore if fetch for next year fails
                pass
        return entries

    def fetch_year(self, year, city, street, types):
        args = {
            "city": city,
            "street": street,
            "year": year,
            "types[]": types,
            "go_ics": "Download",
        }

        # get ics file
        r = requests.get("https://www.abfallkalender-zak.de", params=args)

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            waste_type = d[1]
            next_pickup_date = d[0]

            entries.append(
                Collection(date=next_pickup_date,
                           t=waste_type,
                           icon=self._iconMap.get(waste_type,
                                                  "mdi:trash-can")))

        return entries
Code Example #7
class Source:
    def __init__(self, ort, dropzone, ics_with_drop=False):
        self._ort = ort
        self._dropzone = dropzone
        self._ics_with_drop = ics_with_drop
        self._ics = ICS()

    def fetch(self):
        now = datetime.datetime.now()
        entries = self.fetch_year(now.year)
        if now.month == 12:
            # also get data for next year if we are already in december
            try:
                entries.extend(self.fetch_year(now.year + 1))
            except Exception:
                # ignore if fetch for next year fails
                pass
        return entries

    def fetch_year(self, year):
        args = {
            "action": "execute_create_ics",
            "ort": self._ort,
            "dropzone": self._dropzone,
            "year": year,
            "ics_with_drop": "true" if self._ics_with_drop else "false",
        }

        # step 1: prepare ics file
        r = requests.post(
            "https://www.abfall-kreis-tuebingen.de/wp-admin/admin-ajax.php",
            data=args)

        # request returns a string with the format "\n\n\n\n\n\n\n\n<url>|<file>"
        # with url ::= https://www.abfall-kreis-tuebingen.de/wp-content/uploads/abfuhrtermine_id_XXXXXX.ics
        # and file ::= abfuhrtermine_id_XXXXXX.ics
        (url, file) = r.text.strip().split("|")
        # print(f"url = {url}, file = {file}")

        # step 2: get ics file
        r = requests.get(url)
        r.encoding = "utf-8"  # requests doesn't guess the encoding correctly
        ics_file = r.text

        # step 3: delete ics file
        r = requests.post(
            "https://www.abfall-kreis-tuebingen.de/wp-admin/admin-ajax.php",
            data={"action": "execute_remove_ics"},
        )

        # parse ics file
        dates = self._ics.convert(ics_file)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #8
class Source:
    def __init__(self, streetId=None, streetName=None):
        self._streetId = streetId
        self._streetName = streetName
        self._ics = ICS()

    def fetch(self):
        # get token
        if self._streetName:
            url = "https://wellington.govt.nz/layouts/wcc/GeneralLayout.aspx/GetRubbishCollectionStreets"
            data = {"partialStreetName": self._streetName}
            r = requests.post(url, json=data)
            data = json.loads(r.text)
            if len(data["d"]) == 0:
                raise Exception(
                    f"No result found for streetName {self._streetName}")
            if len(data["d"]) > 1:
                raise Exception(
                    f"More then one result returned for streetName {self._streetName}, be more specific or use streetId instead"
                )
            self._streetId = data["d"][0].get("Key")

        if not self._streetId:
            raise Exception("No streetId supplied")

        url = "https://wellington.govt.nz/~/ical/"
        params = {
            "type": "recycling",
            "streetId": self._streetId,
            "forDate": datetime.date.today(),
        }
        r = requests.get(url, params=params)

        if not r.text.startswith("BEGIN:VCALENDAR"):
            raise Exception(f"{self._streetId} is not a valid streetID")

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            for wasteType in d[1].split("&"):
                wasteType = wasteType.strip()
                entries.append(
                    Collection(
                        d[0],
                        wasteType,
                        picture=PICTURE_MAP[wasteType],
                        icon=ICON_MAP[wasteType],
                    ))
        return entries
Code Example #9
class Source:
    def __init__(self, ort, strasse, hausnummer):
        self._ort = ort
        self._strasse = strasse
        self._hausnummer = hausnummer
        self._ics = ICS()

    def fetch(self):
        session = requests.session()

        r = session.get(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet?SubmitAction=wasteDisposalServices&InFrameMode=TRUE"
        )

        # add all hidden input fields to form data
        p = HiddenInputParser()
        p.feed(r.text)
        args = p.args

        args["Focus"] = "Hausnummer"
        args["SubmitAction"] = "forward"
        args["Ort"] = self._ort
        args["Strasse"] = self._strasse
        args["Hausnummer"] = self._hausnummer
        r = session.post(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet",
            data=args)

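        # second request: switch to the "Terminliste" page and ask the servlet
        # for an iCal download (an assumption based on the SubmitAction and
        # ApplicationName values set below)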
        args["ApplicationName"] = "com.athos.kd.udb.AbfuhrTerminModel"
        args["Focus"] = None
        args["IsLastPage"] = "true"
        args["Method"] = "POST"
        args["PageName"] = "Terminliste"
        args["SubmitAction"] = "filedownload_ICAL"
        del args["Ort"]
        del args["Strasse"]
        del args["Hausnummer"]
        r = session.post(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet",
            data=args)

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #10
class Source:
    def __init__(self, district, city, street=None):
        self._district = district
        self._city = city
        self._street = street
        self._ics = ICS()

    def fetch(self):
        session = requests.Session()

        params = {
            "Abfuhrbezirk": self._district,
            "Ortschaft": self._city,
            "Strasse": self._street,
        }

        r = session.post(
            "https://www.awb-lm.de/generator/abfuhrtermine.php",
            data=params,
        )

        r.raise_for_status()
        
        soup = BeautifulSoup(r.text, features="html.parser")
        downloads = soup.find_all("a", href=True)
        ics_url = None
        for download in downloads:
            href = download.get("href")
            if "cache/ical" in href:
                ics_url = href

        if ics_url is None:
            raise Exception(f"ics url not found")

        # get ics file
        r = session.get("https://www.awb-lm.de" + ics_url, headers=HEADERS)
        r.raise_for_status()

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1].split(" am ")[0]))
        return entries
Code Example #11
class Source:
    def __init__(self, street, house_number):
        self._street = street
        self._house_number = house_number
        self._ics = ICS()

    def fetch(self):
        params = {
            "name": self._street,
        }

        # get list of streets and house numbers
        r = requests.get(
            "https://stadtreinigung-leipzig.de/rest/wastecalendarstreets",
            params=params)

        data = json.loads(r.text)
        if len(data["results"]) == 0:
            _LOGGER.error(f"street not found: {self._street}")
            return []
        street_entry = data["results"].get(self._street)
        if street_entry is None:
            _LOGGER.error(f"street not found: {self._street}")
            return []

        id = street_entry.get(str(self._house_number))
        if id is None:
            _LOGGER.error(f"house_number not found: {self._house_number}")
            return []

        # get ics file
        params = {
            "position_nos": id,
        }
        r = requests.get(
            "https://stadtreinigung-leipzig.de/wir-kommen-zu-ihnen/abfallkalender/ical.ics",
            params=params,
        )
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1].removesuffix(", ")))
        return entries
Code Example #12
class Source:
    def __init__(self, street, house_number):
        self._street = street
        self._house_number = house_number
        self._ics = ICS(regex=r"(.*)\:\s*\!")

    def fetch(self):

        args = {
            "id": 430,
            "tx_citkoabfall_abfallkalender[strasse]": str(self._street).encode("utf-8"),
            "tx_citkoabfall_abfallkalender[hausnummer]": str(self._house_number).encode("utf-8"),
            "tx_citkoabfall_abfallkalender[abfallarten][0]": 61,
            "tx_citkoabfall_abfallkalender[abfallarten][1]": 60,
            "tx_citkoabfall_abfallkalender[abfallarten][2]": 59,
            "tx_citkoabfall_abfallkalender[abfallarten][3]": 58,
            "tx_citkoabfall_abfallkalender[action]": "ics",
            "tx_citkoabfall_abfallkalender[controller]": "FrontendIcs",
        }

        # use '%20' instead of '+' in URL
        # https://stackoverflow.com/questions/21823965/use-20-instead-of-for-space-in-python-query-parameters
        args = urllib.parse.urlencode(args, quote_via=urllib.parse.quote)

        # get request (the query string was already encoded above)
        r = requests.get(URL, params=args)

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #13
class Source:
    def __init__(self, city, street=None):
        self._city = city
        self._street = street
        self._ics = ICS()

    def fetch(self):
        session = requests.Session()

        params = {
            "city": self._city,
            "street": self._street,
            "direct": "true",
        }
        r = session.get(
            "https://www.awb-es.de/abfuhr/abfuhrtermine/__Abfuhrtermine.html",
            params=params,
        )
        r.raise_for_status()

        soup = BeautifulSoup(r.text, features="html.parser")
        downloads = soup.find_all("a", href=True)
        ics_url = None
        for download in downloads:
            href = download.get("href")
            if "t=ics" in href:
                ics_url = href

        if ics_url is None:
            raise Exception(f"ics url not found")

        # get ics file
        r = session.get(ics_url, headers=HEADERS)
        r.raise_for_status()

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #14
class Source:
    def __init__(self, hnId, asId=None):
        self._hnId = hnId
        self._ics = ICS()

    def fetch(self):
        args = {"hnIds": self._hnId, "adresse": "MeineAdresse"}

        # get ics file
        r = requests.get(
            "https://backend.stadtreinigung.hamburg/kalender/abholtermine.ics",
            params=args,
        )

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #15
class Source:
    def __init__(self, url):
        self._url = url
        self._ics = ICS()

    def fetch(self):
        r = requests.get(self._url)
        if r.status_code != 200:
            _LOGGER.error("Error querying calendar data")
            return []

        fixed_text = r.text.replace("REFRESH - INTERVAL; VALUE = ",
                                    "REFRESH-INTERVAL;VALUE=")

        dates = self._ics.convert(fixed_text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #16
class Source:
    def __init__(self, city, types, street=None):
        self._city = city
        self._street = street
        self._types = types
        self._ics = ICS()

    def fetch(self):
        now = datetime.now()
        entries = self.fetch_year(now.year, self._city, self._street,
                                  self._types)
        if now.month == 12:
            # also get data for next year if we are already in december
            try:
                entries.extend(
                    self.fetch_year((now.year + 1), self._city, self._street,
                                    self._types))
            except Exception:
                # ignore if fetch for next year fails
                pass
        return entries

    def fetch_year(self, year, city, street, types):
        args = {
            "city": city,
            "street": street,
            "year": year,
            "types[]": types,
            "go_ics": "Download",
        }

        # get ics file
        r = requests.get("https://www.abfallkalender-zak.de", params=args)

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #17
class Source:
    def __init__(self, district: str):
        self._district = district
        self._ics = ICS()

    def fetch(self):
        now = datetime.now()
        entries = self._fetch_year(now.year)

        if now.month == 12:
            # also get data for next year if we are already in december
            with contextlib.suppress(Exception):
                entries.extend(self._fetch_year(now.year + 1))

        return entries

    def _fetch_year(self, year: int):
        url = "/".join(
            str(param) for param in (
                API_URL,
                self._district,
                year,
                REMINDER_DAY,
                REMINDER_HOUR,
                FILENAME,
            ))

        r = requests.get(url)
        schedule = self._ics.convert(r.text)

        return [
            Collection(date=entry[0],
                       t=entry[1],
                       icon=ICON_MAP.get(entry[1], "mdi:trash-can"))
            for entry in schedule
        ]
Code Example #18
class Source:
    def __init__(self,
                 url=None,
                 file=None,
                 offset=None,
                 params=None,
                 year_field=None):
        self._url = url
        self._file = file
        if bool(self._url is not None) == bool(self._file is not None):
            raise RuntimeError("Specify either url or file")
        self._ics = ICS(offset)
        self._params = params
        self._year_field = year_field  # replace this field in params with current year

    def fetch(self):
        if self._url is not None:
            if "{%Y}" in self._url or self._year_field is not None:
                # url contains wildcard or params contains year field
                now = datetime.datetime.now()

                # replace year in url
                url = self._url.replace("{%Y}", str(now.year))

                # replace year in params
                if self._year_field is not None:
                    if self._params is None:
                        raise RuntimeError(
                            "year_field specified without params")
                    self._params[self._year_field] = str(now.year)

                entries = self.fetch_url(url, self._params)

                if now.month == 12:
                    # also get data for next year if we are already in december
                    url = self._url.replace("{%Y}", str(now.year + 1))
                    if self._year_field is not None:
                        self._params[self._year_field] = str(now.year + 1)

                    try:
                        entries.extend(self.fetch_url(url, self._params))
                    except Exception:
                        # ignore if fetch for next year fails
                        pass
                return entries
            else:
                return self.fetch_url(self._url, self._params)
        elif self._file is not None:
            return self.fetch_file(self._file)

    def fetch_url(self, url, params=None):
        # get ics file
        r = requests.get(url, params=params, headers=HEADERS)
        r.encoding = "utf-8"  # requests doesn't guess the encoding correctly

        return self._convert(r.text)

    def fetch_file(self, file):
        with open(file) as f:
            return self._convert(f.read())

    def _convert(self, data):
        dates = self._ics.convert(data)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
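A short usage sketch for this generic ICS source: the {%Y} wildcard in the URL, or the field named by year_field in params, is replaced with the current year before each download. The URLs below are made-up placeholders, not real endpoints:

# year embedded in the URL via the {%Y} wildcard
source = Source(url="https://example.com/waste/{%Y}/calendar.ics")
entries = source.fetch()

# or: year passed as a query parameter named via year_field
source = Source(
    url="https://example.com/waste/calendar.ics",
    params={"year": None},
    year_field="year",
)
entries = source.fetch()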
Code Example #19
class Source:
    def __init__(self,
                 service,
                 mm_frm_ort_sel=None,
                 mm_frm_str_sel=None,
                 mm_frm_hnr_sel=None):
        self._service = service
        self._mm_frm_ort_sel = mm_frm_ort_sel
        self._mm_frm_str_sel = mm_frm_str_sel
        self._mm_frm_hnr_sel = mm_frm_hnr_sel
        self._ics = ICS()

    def fetch(self):
        mm_ses = InputTextParser(name="mm_ses")

        url = f"https://www.muellmax.de/abfallkalender/{self._service.lower()}/res/{self._service}Start.php"
        r = requests.get(url)
        mm_ses.feed(r.text)

        # select "Abfuhrtermine", returns ort or an empty street search field
        args = {"mm_ses": mm_ses.value, "mm_aus_ort.x": 0, "mm_aus_ort.x": 0}
        r = requests.post(url, data=args)
        mm_ses.feed(r.text)

        if self._mm_frm_ort_sel is not None:
            # select city
            args = {
                "mm_ses": mm_ses.value,
                "xxx": 1,
                "mm_frm_ort_sel": self._mm_frm_ort_sel,
                "mm_aus_ort_submit": "weiter",
            }
            r = requests.post(url, data=args)
            mm_ses.feed(r.text)

        if self._mm_frm_str_sel is not None:
            # show street selection page
            args = {
                "mm_ses": mm_ses.value,
                "xxx": 1,
                "mm_frm_str_name": "",
                "mm_aus_str_txt_submit": "suchen",
            }
            r = requests.post(url, data=args)
            mm_ses.feed(r.text)

            # select street
            args = {
                "mm_ses": mm_ses.value,
                "xxx": 1,
                "mm_frm_str_sel": self._mm_frm_str_sel,
                "mm_aus_str_sel_submit": "weiter",
            }
            r = requests.post(url, data=args)
            mm_ses.feed(r.text)

        if self._mm_frm_hnr_sel is not None:
            # select house number
            args = {
                "mm_ses": mm_ses.value,
                "xxx": 1,
                "mm_frm_hnr_sel": self._mm_frm_hnr_sel,
                "mm_aus_hnr_sel_submit": "weiter",
            }
            r = requests.post(url, data=args)
            mm_ses.feed(r.text)

        # select to get ical
        args = {
            "mm_ses": mm_ses.value,
            "xxx": 1,
            "mm_ica_auswahl": "iCalendar-Datei"
        }
        r = requests.post(url, data=args)
        mm_ses.feed(r.text)

        mm_frm_fra = InputCheckboxParser(startswith="mm_frm_fra")
        mm_frm_fra.feed(r.text)

        # get ics file
        args = {"mm_ses": mm_ses.value, "xxx": 1, "mm_frm_type": "termine"}
        args.update(mm_frm_fra.value)
        args.update({"mm_ica_gen": "iCalendar-Datei laden"})
        r = requests.post(url, data=args)
        mm_ses.feed(r.text)

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #20
class Source:
    def __init__(self, level_1, level_2, level_3=None):
        self._districts = [level_1, level_2, level_3]
        self._ics = ICS()

    def fetch(self):
        # Use a session to keep cookies and stuff
        session = requests.Session()

        # Get the IDs of the districts on the first level
        # Double loading is on purpose because sometimes the webpage has an overlay
        # which is gone on the second try in a session
        r = session.get(URL, headers=HEADERS)
        if "Zur aufgerufenen Seite" in r.text:
            r = session.get(URL, headers=HEADERS)
        if r.status_code != 200:
            raise Exception(f"Error: failed to fetch first url: {URL}")

        # Get the IDs of the districts on the first level
        id = self.parse_level(r.text, 1)

        # Get the IDs of the districts on the second level
        url = (
            "https://www.landkreis-harburg.de/ajax/abfall_gebiete_struktur_select.html"
        )
        params = {
            "parent": id,
            "ebene": 1,
            "portal": 1,
            "selected_ebene": 0,
        }
        r = session.get(url, params=params, headers=HEADERS)
        if r.status_code != 200:
            raise Exception(f"Error: failed to fetch second url: {url}")

        # Get the IDs of the districts on the second level
        id = self.parse_level(r.text, 2)

        # Get the IDs of the third level - if applicable
        if self._districts[3 - 1] is not None:
            # Get the IDs of the districts on the third level
            params = {
                "parent": id,
                "ebene": 2,
                "portal": 1,
                "selected_ebene": 0,
            }
            r = session.get(url, params=params, headers=HEADERS)
            if r.status_code != 200:
                raise Exception(f"Error: failed to fetch third url: {url}")

            # Get the IDs of the districts on the third level
            id = self.parse_level(r.text, 3)

        # Prepare data for the real web request
        url = "https://www.landkreis-harburg.de/abfallkalender/abfallkalender_struktur_daten_suche.html"
        params = {
            "selected_ebene": id,
            "owner": 20100,
        }
        r = session.get(url, params=params, headers=HEADERS)

        # Sometimes there is no garbage calendar available
        if "Es sind keine Abfuhrbezirke hinterlegt." in r.text:
            raise Exception(
                f'Error: "Es sind keine Abfuhrbezirke hinterlegt." for "{self._districts[3-1]}". Please use different input data.'
            )

        soup = BeautifulSoup(r.text, features="html.parser")
        links = soup.find_all("a")
        ical_url = ""
        for any_link in links:
            if " als iCal" in any_link.text:
                ical_url = any_link.get("href")

        if "ical.html" not in ical_url:
            raise Exception("No ical Link in the result: " + str(links))

        # Get the final data
        r = requests.get(ical_url, headers=HEADERS)
        if not r.ok:
            raise Exception(f"Error: failed to fetch url: {ical_url}")

        # Parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries

    def parse_level(self, response, level):
        soup = BeautifulSoup(response, features="html.parser")
        select_content = soup.find_all("select", id=f"strukturEbene{level}")
        soup = BeautifulSoup(str(select_content), features="html.parser")
        options_content = soup.find_all("option")
        level_ids = {}
        for option in options_content:
            # Ignore the "Bitte wählen..."
            if option.get("value") != "0":
                level_ids[option.text] = option.get("value")

        if level_ids == {}:
            raise Exception(f"Error: Level {level} Dictionary empty")

        if self._districts[level - 1] not in level_ids:
            raise Exception(
                f"Error: District {self._districts[level]} is not in the dictionary: {level_ids}"
            )

        return level_ids[self._districts[level - 1]]
Code Example #21
class Source:
    def __init__(self, city, street=None):
        self._city = city
        self._street = street
        self._ics = ICS()

    def fetch(self):
        # Get the hidden parameters by loading the page
        session = requests.Session()
        r = session.get(URL)
        r.raise_for_status()

        soup = BeautifulSoup(r.text, features="html.parser")
        hidden_tags = soup.find_all("input", type="hidden")

        # Prepare data for the real web request
        data = {}
        for tag in hidden_tags:
            data[tag.get("name")] = tag.get("value")

        # Find the cities which do need a street name
        data_cities_with_streets = soup.find_all("input",
                                                 type="text",
                                                 placeholder="Ort eingeben")
        cities_with_streets = ""
        for tag in data_cities_with_streets:
            cities_with_streets += tag.get("data-cities-with-streets")
        cities_with_streets = cities_with_streets.split(",")

        data["tx_avlcollections_pi5[wasteCalendarLocationItem]"] = self._city
        data["tx_avlcollections_pi5[wasteCalendarStreetItem]"] = self._street

        # Remove some data which the webserver doesn't like
        data.pop("id", None)
        data.pop("tx_kesearch_pi1[page]", None)
        data.pop("tx_kesearch_pi1[resetFilters]", None)
        data.pop("tx_kesearch_pi1[sortByField]", None)
        data.pop("tx_kesearch_pi1[sortByDir]", None)

        # Depending on the city remove the street from the data set
        if self._city.lower() not in cities_with_streets:
            data.pop("tx_avlcollections_pi5[wasteCalendarStreetItem]", None)

        # Get the final data
        r = session.post(URL, data=data)
        r.raise_for_status()

        if r.text.find("Ort konnte nicht gefunden werden.") != -1:
            raise Exception("Error: Ort konnte nicht gefunden werden.")

        if r.text.find("Straße konnte nicht gefunden werden.") != -1:
            raise Exception("Error: Ort konnte nicht gefunden werden.")

        if r.text.find(".ics") == -1:
            raise Exception("Error: No ics link found.")

        soup = BeautifulSoup(r.text, features="html.parser")
        downloads = soup.find_all("a", href=True)
        ics_link = ""
        for download in downloads:
            link = download.get("href")
            if ".ics" in link:
                ics_link = link
        full_url = "https://www.avl-ludwigsburg.de" + ics_link
        return self.fetch_ics(full_url)

    def fetch_ics(self, url):
        r = requests.get(url)
        r.raise_for_status()

        # Parse ics file
        r.encoding = "utf-8"
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #22
class Source:
    def __init__(self, abf_strasse, abf_hausnr):
        self._abf_strasse = abf_strasse
        self._abf_hausnr = abf_hausnr
        self._ics = ICS()

    def fetch(self):
        # get cookie
        r = requests.get("https://www.bsr.de/abfuhrkalender-20520.php")
        cookies = r.cookies

        # get street name only (without PLZ)
        street = self._abf_strasse.split(",")[0]

        # start search using string name (without PLZ)
        args = {"script": "dynamic_search", "step": 1, "q": street}
        r = requests.get(
            "https://www.bsr.de/abfuhrkalender_ajax.php", params=args, cookies=cookies
        )

        # retrieve house number list
        args = {"script": "dynamic_search", "step": 2, "q": self._abf_strasse}
        r = requests.get(
            "https://www.bsr.de/abfuhrkalender_ajax.php", params=args, cookies=cookies
        )

        args = {
            "abf_strasse": street,
            "abf_hausnr": self._abf_hausnr,
            "tab_control": "Jahr",
            "abf_config_weihnachtsbaeume": "",
            "abf_config_restmuell": "on",
            "abf_config_biogut": "on",
            "abf_config_wertstoffe": "on",
            "abf_config_laubtonne": "on",
            # "abf_selectmonth": "5 2020",
            # "abf_datepicker": "28.04.2020",
            # "listitems":7,
        }
        r = requests.post(
            "https://www.bsr.de/abfuhrkalender_ajax.php?script=dynamic_kalender_ajax",
            data=args,
            cookies=cookies,
        )

        args = {
            "script": "dynamic_iCal_ajax",
            "abf_strasse": self._abf_strasse,
            "abf_hausnr": self._abf_hausnr,
            "tab_control": "Jahr",
            "abf_config_weihnachtsbaeume": "",
            "abf_config_restmuell": "on",
            "abf_config_biogut": "on",
            "abf_config_wertstoffe": "on",
            "abf_config_laubtonne": "on",
            # "abf_selectmonth": "5 2020",
            # "listitems":7,
        }

        # create url using private url encoding
        encoded = map(lambda key: f"{key}={myquote(str(args[key]))}", args.keys())
        url = "https://www.bsr.de/abfuhrkalender_ajax.php?" + "&".join(encoded)
        r = requests.get(url, cookies=cookies)

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #23
class Source:
    def __init__(
        self,
        url=None,
        file=None,
        offset=None,
        params=None,
        year_field=None,
        method="GET",
        split_at=None,
        version=2,
        verify_ssl=True,
    ):
        self._url = url
        self._file = file
        if bool(self._url is not None) == bool(self._file is not None):
            raise RuntimeError("Specify either url or file")
        if version == 1:
            self._ics = ICS_v1(offset=offset, split_at=split_at)
        else:
            self._ics = ICS(offset=offset, split_at=split_at)
        self._params = params
        self._year_field = year_field  # replace this field in params with current year
        self._method = method  # The method to send the params
        self._verify_ssl = verify_ssl

    def fetch(self):
        if self._url is not None:
            if "{%Y}" in self._url or self._year_field is not None:
                # url contains wildcard or params contains year field
                now = datetime.datetime.now()

                # replace year in url
                url = self._url.replace("{%Y}", str(now.year))

                # replace year in params
                if self._year_field is not None:
                    if self._params is None:
                        raise RuntimeError(
                            "year_field specified without params")
                    self._params[self._year_field] = str(now.year)

                entries = self.fetch_url(url, self._params)

                if now.month == 12:
                    # also get data for next year if we are already in december
                    url = self._url.replace("{%Y}", str(now.year + 1))
                    if self._year_field is not None:
                        self._params[self._year_field] = str(now.year + 1)

                    try:
                        entries.extend(self.fetch_url(url, self._params))
                    except Exception:
                        # ignore if fetch for next year fails
                        pass
                return entries
            else:
                return self.fetch_url(self._url, self._params)
        elif self._file is not None:
            return self.fetch_file(self._file)

    def fetch_url(self, url, params=None):
        # get ics file
        if self._method == "GET":
            r = requests.get(url,
                             params=params,
                             headers=HEADERS,
                             verify=self._verify_ssl)
        elif self._method == "POST":
            r = requests.post(url,
                              data=params,
                              headers=HEADERS,
                              verify=self._verify_ssl)
        else:
            raise RuntimeError(
                "Error: unknown method to fetch URL, use GET or POST; got {self._method}"
            )
        r.encoding = "utf-8"  # requests doesn't guess the encoding correctly

        # check the return code
        if not r.ok:
            _LOGGER.error(
                "Error: the response is not ok; need code 200, but got code %s"
                % r.status_code)
            return []

        # log the result. for debugging
        _LOGGER.debug(r.text)

        return self._convert(r.text)

    def fetch_file(self, file):
        with open(file) as f:
            return self._convert(f.read())

    def _convert(self, data):
        dates = self._ics.convert(data)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #24
class Source:
    def __init__(self, city, street=None):
        self._city = city
        self._street = street
        self._ics = ICS()

    def fetch(self):
        cityId = self.fetch_city_id(self._city)
        streetId = self.fetch_street_id(cityId, self._street)

        return self.fetch_ics(DOWNLOAD_URL.format(streetId))

    def is_city_selection(self, tag, cityName):
        return tag['value'] != "" and tag.string == cityName

    def fetch_city_id(self, cityName):
        r = requests.get(URL)
        if not r.ok:
            raise Exception("Error: failed to fetch url: {}".format(URL))

        soup = BeautifulSoup(r.text, 'html.parser')
        citySelection = [
            a for a in soup.select('#sf_locid > option[value]')
            if self.is_city_selection(a, cityName)
        ]
        if len(citySelection) == 0:
            raise Exception(
                "Error: could not find id for city: '{}'".format(cityName))

        if len(citySelection) > 1:
            raise Exception(
                "Error: non-unique match for city: '{}'".format(cityName))

        return citySelection[0]['value']

    def fetch_street_id(self, cityId, streetName):
        r = requests.get(AUTOCOMPLETE_URL.format(cityId, streetName),
                         headers={"Referer": URL})

        if not r.ok:
            raise Exception("Error: failed to fetch url: {}".format(
                AUTOCOMPLETE_URL.format(cityId, streetName)))

        streets = json.loads(r.text)
        if streetName is not None:
            streetId = [item[0] for item in streets if streetName in item[1]]
        else:
            streetId = [item[0] for item in streets]

        if len(streetId) == 0:
            raise Exception(
                "Error: could not find streets for city id / street: {}, '{}'".
                format(cityId, streetName))

        if len(streetId) > 1:
            raise Exception(
                "Error: non-unique match for city id / street: {}, '{}'".
                format(cityId, streetName))

        return streetId[0]

    def fetch_ics(self, url):
        r = requests.get(url, headers={"Referer": URL})

        if not r.ok:
            raise Exception("Error: failed to fetch url: {}".format(url))

        # parse ics file
        r.encoding = "utf-8"
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #25
File: abfall_io.py, Project: fenlis/hass-config
class Source:
    def __init__(
        self,
        key,
        f_id_kommune,
        f_id_strasse,
        f_id_bezirk=None,
        f_id_strasse_hnr=None,
        f_abfallarten=None,
    ):
        self._key = key
        self._kommune = f_id_kommune
        self._bezirk = f_id_bezirk
        self._strasse = f_id_strasse
        self._strasse_hnr = f_id_strasse_hnr
        self._abfallarten = f_abfallarten if f_abfallarten is not None else []  # list of integers
        self._ics = ICS()

    def fetch(self):
        # get token
        params = {"key": self._key, "modus": MODUS_KEY, "waction": "init"}

        r = requests.post("https://api.abfall.io",
                          params=params,
                          headers=HEADERS)

        # add all hidden input fields to form data
        # There is one hidden field which acts as a token:
        # It consists of a UUID key and a UUID value.
        p = HiddenInputParser()
        p.feed(r.text)
        args = p.args

        args["f_id_kommune"] = self._kommune
        args["f_id_strasse"] = self._strasse

        if self._bezirk is not None:
            args["f_id_bezirk"] = self._bezirk

        if self._strasse_hnr is not None:
            args["f_id_strasse_hnr"] = self._strasse_hnr

        for i in range(len(self._abfallarten)):
            args[f"f_id_abfalltyp_{i}"] = self._abfallarten[i]

        args["f_abfallarten_index_max"] = len(self._abfallarten)
        args["f_abfallarten"] = ",".join(
            map(lambda x: str(x), self._abfallarten))

        now = datetime.datetime.now()
        date2 = now.replace(year=now.year + 1)
        args["f_zeitraum"] = f"{now.strftime('%Y%m%d')}-{date2.strftime('%Y%m%d')}"

        params = {
            "key": self._key,
            "modus": MODUS_KEY,
            "waction": "export_ics"
        }

        # get ics file
        r = requests.post("https://api.abfall.io",
                          params=params,
                          data=args,
                          headers=HEADERS)

        # parse ics file
        r.encoding = "utf-8"  # requests doesn't guess the encoding correctly
        ics_file = r.text

        dates = self._ics.convert(ics_file)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #26
class Source:
    def __init__(self, street):
        self._street = street
        self._ics = ICS()

    def fetch(self):
        now = datetime.datetime.now()
        entries = self.fetch_year(now.year)

        if now.month == 12:
            # also get data for next year if we are already in december
            try:
                entries.extend(self.fetch_year(now.year + 1))
            except Exception:
                # ignore if fetch for next year fails
                pass
        return entries

    def fetch_year(self, year):
        with requests.Session() as s:
            # get session id
            s.get("https://www.oberhausen.de/abfallkalender")

            # update session with filters and get ICS link
            r = s.post("https://www.oberhausen.de/abfallkalender",
                       data={
                           "abfall_searchstring": self._street,
                           "abfall_jahr": year,
                           "actbio": "on",
                           "actdeckgruen": "on",
                           "actdeckrot": "on",
                           "actdeckblau": "on",
                           "actgelb": "on",
                           "actpapier": "on",
                           "submit_search": ""
                       })

            # extract ICS link
            parser = ICSLinkParser()
            parser.feed(r.text)

            if not "href" in parser.args or parser.args["href"] == "":
                raise Exception(
                    "Error: could not extract ICS download link for year / street: {}, '{}'"
                    .format(year, self._street))

            # download ICS file
            r = s.get(parser.args['href'])

            # check the return code
            if not r.ok:
                raise Exception(
                    "Error: the response is not ok; need code 200, but got code {}"
                    .format(r.status_code))

        # parse ics file
        r.encoding = "utf-8"
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Code Example #27
class Source:
    def __init__(self,
                 city: str,
                 street: str,
                 house_number: int,
                 address_suffix: str = ""):
        self._city = city
        self._street = street
        self._hnr = house_number
        self._suffix = address_suffix
        self._ics = ICS()

    def fetch(self):
        session = requests.session()

        r = session.get(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            params={
                "SubmitAction": "wasteDisposalServices",
                "InFrameMode": "TRUE"
            },
        )
        r.raise_for_status()
        r.encoding = "utf-8"

        parser = HiddenInputParser()
        parser.feed(r.text)

        args = parser.args
        args["Ort"] = self._city
        args["Strasse"] = self._street
        args["Hausnummer"] = str(self._hnr)
        args["Hausnummerzusatz"] = self._suffix
        args["SubmitAction"] = "CITYCHANGED"
        r = session.post(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            data=args,
        )
        r.raise_for_status()

        args["SubmitAction"] = "forward"
        args["ContainerGewaehltRM"] = "on"
        args["ContainerGewaehltBM"] = "on"
        args["ContainerGewaehltLVP"] = "on"
        args["ContainerGewaehltPA"] = "on"
        args["ContainerGewaehltPrMuell"] = "on"
        r = session.post(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            data=args,
        )
        r.raise_for_status()

        args["ApplicationName"] = "com.athos.kd.rheinhunsrueck.AbfuhrTerminModel"
        args["SubmitAction"] = "filedownload_ICAL"
        r = session.post(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            data=args,
        )
        r.raise_for_status()

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries