Example No. 1
    def fetch(self):
        # fetch "Gelber Sack"
        args = {"g": self._city}
        r = requests.get(
            "https://was-wolfsburg.de/subgelberweihgarten/php/abfuhrgelber.php",
            params=args,
        )

        entries = []
        match = re.findall(r"(\d{2})\.(\d{2})\.(\d{4})", r.text)
        for m in match:
            date = datetime.date(day=int(m[0]), month=int(m[1]), year=int(m[2]))
            entries.append(Collection(date, "Gelber Sack"))

        # fetch remaining collections
        args = {"ortabf": self._street}
        r = requests.post(
            "https://was-wolfsburg.de/subabfuhrtermine/ics_abfuhrtermine3.php",
            data=args,
        )
        dates = self._ics.convert(r.text)
        for d in dates:
            entries.append(Collection(d[0], d[1]))

        return entries
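A note on the pattern above: when a regular expression contains multiple capture groups, `re.findall` returns one tuple of group strings per match, which is why `m[0]`, `m[1]` and `m[2]` map to day, month and year. A quick standalone illustration (the input string is made up):

    import re

    text = "Gelber Sack: 05.01.2024 und 19.01.2024"  # made-up sample input
    matches = re.findall(r"(\d{2})\.(\d{2})\.(\d{4})", text)
    print(matches)  # [('05', '01', '2024'), ('19', '01', '2024')]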
Example No. 2
    def fetch(self):
        url = "https://recycleapp.be/api/app/v1"
        headers = {
            "x-secret": "Crgja3EGWe8jdapyr4EEoMBgZACYYjRRcRpaMQrLDW9HJBvmgkfGQyYqLgeXPavAGvnJqkV87PBB2b8zx43q46sUgzqio4yRZbABhtKeagkVKypTEDjKfPgGycjLyJTtLHYpzwJgp4YmmCuJZN9ZmJY8CGEoFs8MKfdJpU9RjkEVfngmmk2LYD4QzFegLNKUbcCeAdEW",
            "x-consumer": "recycleapp.be",
            "User-Agent": "",
            "Authorization": "",
        }
        r = requests.get(f"{url}/access-token", headers=headers)
        headers["Authorization"] = r.json()["accessToken"]

        params = {"q": self._postcode}
        r = requests.get(f"{url}/zipcodes", params=params, headers=headers)
        if r.status_code != 200:
            _LOGGER.error("Get zip code failed")
            return []
        zipcodeId = r.json()["items"][0]["id"]

        params = {"q": self._street, "zipcodes": zipcodeId}
        r = requests.get(f"{url}/streets", params=params, headers=headers)
        if r.status_code != 200:
            _LOGGER.error("Get street id failed")
            return []

        streetId = None
        for item in r.json()["items"]:
            if item["name"] == self._street:
                streetId = item["id"]
        if streetId is None:
            streetId = r.json()["items"][0]["id"]

        now = datetime.now()
        fromDate = now.strftime("%Y-%m-%d")
        untilDate = (now + timedelta(days=365)).strftime("%Y-%m-%d")
        params = {
            "zipcodeId": zipcodeId,
            "streetId": streetId,
            "houseNumber": self._house_number,
            "fromDate": fromDate,
            "untilDate": untilDate,
            #            "size":100,
        }
        r = requests.get(f"{url}/collections", params=params, headers=headers)
        if r.status_code != 200:
            _LOGGER.error("Get data failed")
            return []

        entries = []
        for item in r.json()["items"]:
            if "exception" in item and "replacedBy" in item["exception"]:
                continue

            date = datetime.strptime(item["timestamp"], "%Y-%m-%dT%H:%M:%S.000Z").date()
            if item["type"] == "collection":
                entries.append(Collection(date, item["fraction"]["name"]["en"]))
            elif item["type"] == "event" and self._add_events:
                entries.append(Collection(date, item["event"]["title"]["en"]))

        return entries
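The `Collection` container used throughout these examples is not included in the listing. Based purely on how it is called here (a date, a type string `t`, and optional `icon`/`picture` keywords), a minimal stand-in might look like the sketch below; the exact field semantics are assumptions:

    from dataclasses import dataclass
    from datetime import date as date_type
    from typing import Optional

    @dataclass
    class Collection:
        """Hypothetical stand-in for the Collection class used by these sources."""
        date: date_type                # day of the pickup
        t: str                         # waste type / fraction name
        icon: Optional[str] = None     # e.g. "mdi:trash-can"
        picture: Optional[str] = None  # optional image URL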
Example No. 3
    def fetch(self):
        # get json file
        r = requests.get(
            f"https://new-llpg-app.azurewebsites.net/api/calendar/{self._llpgid}"
        )

        # extract data from json
        data = json.loads(r.text)

        entries = []

        for weeks in data["Weeks"]:
            rows = weeks["Rows"]
            for key in iter(rows):
                for day in rows[key]:
                    try:
                        # Colchester.gov.uk provides its rubbish collection information as a
                        # 2-week cycle of 'Blue' weeks and 'Green' weeks (traditionally
                        # non-recyclable and recyclable weeks). The JSON response encodes this
                        # via `DatesOfFirstCollectionDays`, the first collection day of the
                        # cycle, plus a boolean `WeekOne` field on each week indicating whether
                        # it is the first week of the cycle (a 'Blue' week) or the second
                        # (a 'Green' week). If the week is not `WeekOne`, 7 days are added to
                        # the `DatesOfFirstCollectionDays` date to get the correct 'Green' week
                        # collection date.
                        date = datetime.strptime(
                            data["DatesOfFirstCollectionDays"][key],
                            "%Y-%m-%dT%H:%M:%S")
                        if not weeks["WeekOne"]:
                            date = date + timedelta(days=7)
                        if date > datetime.now():
                            entries.append(
                                Collection(
                                    date=date.date(),
                                    t=day["Name"].title(),
                                    icon=ICONS[day["Name"]],
                                ))
                        # As Colchester.gov.uk only provides the current collection cycle, the next must be extrapolated
                        # from the current week. This is the same method the website uses to display further collection
                        # weeks.
                        entries.append(
                            Collection(
                                date=date.date() + timedelta(days=14),
                                t=day["Name"].title(),
                                icon=ICONS[day["Name"]],
                            ))
                    except ValueError:
                        pass  # ignore date conversion failure for not scheduled collections

        return entries
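A worked illustration of the cycle arithmetic described in the comments above, using made-up dates rather than real Colchester data:

    from datetime import datetime, timedelta

    first_collection = datetime(2024, 1, 1)  # hypothetical DatesOfFirstCollectionDays value
    week_one = False                         # the current week is not WeekOne

    date = first_collection
    if not week_one:
        date += timedelta(days=7)            # 2024-01-08: second ('Green') week of the cycle
    next_cycle = date + timedelta(days=14)   # 2024-01-22: extrapolated next cycle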
Example No. 4
    def fetch(self):
        args = {
            "ort": self._ort,
            "strasse": self._strasse,
            "nummer": self._nummer,
        }

        # look up latitude/longitude for the address
        r = requests.post(
            "https://app.awb-bad-kreuznach.de/api/checkAddress.php", data=args)
        data = json.loads(r.text)

        # get dates
        del args["nummer"]
        args["mode"] = "web"
        args["lat"] = data["lat"]
        args["lon"] = data["lon"]
        r = requests.post("https://app.awb-bad-kreuznach.de/api/loadDates.php",
                          data=args)
        data = json.loads(r.text)

        entries = []

        for d in data["termine"]:
            date = datetime.date.fromisoformat(d["termin"])
            for type in TYPES:
                if d[type] != "0":
                    entries.append(Collection(date, type))

        return entries
Example No. 5
    def fetch(self):
        response = requests.get("https://www.lindau.ch/abfalldaten")

        html = BeautifulSoup(response.text, "html.parser")

        table = html.find("table", attrs={"id": "icmsTable-abfallsammlung"})
        data = json.loads(table.attrs["data-entities"])

        entries = []
        for item in data["data"]:
            if (self._city in item["abfallkreisIds"]
                    or self._city in item["abfallkreisNameList"]):
                next_pickup = item["_anlassDate-sort"].split()[0]
                next_pickup_date = datetime.fromisoformat(next_pickup).date()

                waste_type = BeautifulSoup(item["name"], "html.parser").text
                waste_type_sorted = BeautifulSoup(item["name-sort"],
                                                  "html.parser").text

                entries.append(
                    Collection(
                        date=next_pickup_date,
                        t=waste_type,
                        icon=IconMap.get(waste_type_sorted, "mdi:trash-can"),
                    ))

        return entries
Example No. 6
    def fetch(self):
        # get json file
        r = requests.get(
            f"https://waste-api.york.gov.uk/api/Collections/GetBinCalendarDataForUprn/{self._uprn}"
        )

        # extract data from json
        data = json.loads(r.text)

        entries = []

        for collection in data["collections"]:
            try:
                entries.append(
                    Collection(
                        date=datetime.strptime(
                            collection["date"], "%Y-%m-%dT%H:%M:%S"
                        ).date(),
                        t=collection["roundType"].title(),
                        icon=ICONS[collection["roundType"]],
                    )
                )
            except ValueError:
                pass  # ignore date conversion failure for not scheduled collections

        return entries
Example No. 7
    def fetch(self):

        now = datetime.datetime.now().date()

        r = requests.get(
            "https://stadtplan.dresden.de/project/cardo3Apps/IDU_DDStadtplan/abfall/ical.ashx",
            params={
                "STANDORT": self._standort,
                "DATUM_VON": now.strftime("%d.%m.%Y"),
                "DATUM_BIS": (now + datetime.timedelta(days=365)).strftime("%d.%m.%Y"),
            },
        )

        dates = self._ics.convert(r.text)

        # example: "Leerung Gelbe Tonne, Bio-Tonne"

        entries = []
        for d in dates:
            if d[1] == "Abfallkalender endet bald":
                continue

            types = d[1].removeprefix("Leerung ")
            for type in types.split(", "):
                entries.append(Collection(d[0], type))
        return entries
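For clarity, the summary handling above turns one ICS event into one entry per bin type. With the sample summary from the comment (note that `str.removeprefix` requires Python 3.9+):

    summary = "Leerung Gelbe Tonne, Bio-Tonne"
    types = summary.removeprefix("Leerung ")  # "Gelbe Tonne, Bio-Tonne"
    print(types.split(", "))                  # ['Gelbe Tonne', 'Bio-Tonne']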
Example No. 8
    def fetch(self):
        # get json file
        r = requests.get(
            f"https://servicelayer3c.azure-api.net/wastecalendar/collection/search/{self._uprn}?authority=HDC&take=20"
        )

        # extract data from json
        collections = r.json()["collections"]

        entries = []

        for collection in collections:
            for round_type in collection["roundTypes"]:
                entries.append(
                    Collection(
                        date=datetime.strptime(collection["date"],
                                               "%Y-%m-%dT%H:%M:%SZ").date(),
                        t=round_type.title(),
                        icon=ICONS.get(round_type),
                    ))

        return entries
Example No. 9
    def fetch(self):
        # fetch location id
        r = requests.get(
            API_URLS["address_search"], params={"postCode": self._post_code}
        )
        r.raise_for_status()
        addresses = r.json()

        address_ids = [
            x["id"] for x in addresses if x["houseNumber"].capitalize() == self._number
        ]

        if len(address_ids) == 0:
            raise Exception(f"Could not find address {self._post_code} {self._number}")

        q = str(API_URLS["collection"]).format(address_ids[0])
        r = requests.get(q)
        r.raise_for_status()

        collections = r.json()["collections"]
        entries = []

        for collection in collections:
            for round_type in collection["roundTypes"]:
                entries.append(
                    Collection(
                        date=datetime.strptime(
                            collection["date"], "%Y-%m-%dT%H:%M:%SZ"
                        ).date(),
                        t=round_type.title(),
                        icon=ICONS.get(round_type),
                    )
                )

        return entries
Example No. 10
    def fetch(self):
        response1 = requests.get(
            "https://www.republicservices.com/api/v1/addresses",
            params={"addressLine1": self._street_address},
        )

        address_hash = json.loads(response1.text)["data"][0]["addressHash"]

        response2 = requests.get(
            "https://www.republicservices.com/api/v1/publicPickup",
            params={"siteAddressHash": address_hash},
        )

        r_json = json.loads(response2.text)["data"]

        entries = []

        for x in r_json:
            if hasattr(r_json[x], "__iter__"):
                for item in r_json[x]:
                    waste_type = item["wasteTypeDescription"]
                    icon = "mdi:trash-can"
                    if waste_type == "Recycle":
                        icon = "mdi:recycle"
                    for day in item["nextServiceDays"]:
                        next_pickup = day
                        next_pickup_date = datetime.fromisoformat(
                            next_pickup).date()
                        entries.append(
                            Collection(date=next_pickup_date,
                                       t=waste_type,
                                       icon=icon))

        return entries
Example No. 11
    def fetch(self):
        data = {
            "postCode": self._post_code,
            "houseNumber": self._house_number,
            "companyCode": self._company_code,
        }
        r = requests.post(f"{self._url}/api/FetchAdress", data=data)
        d = r.json()

        dataList = d["dataList"][0]
        data = {
            "uniqueAddressID": dataList["UniqueId"],
            "startDate": datetime.now().strftime("%Y-%m-%d"),
            "endDate":
            (datetime.now() + timedelta(days=365)).strftime("%Y-%m-%d"),
            "companyCode": self._company_code,
            "community": dataList.get("Community", ""),
        }
        r = requests.post(f"{self._url}/api/GetCalendar", data=data)
        d = r.json()

        entries = []
        for wasteType in d["dataList"]:
            for date in wasteType["pickupDates"]:
                entries.append(
                    Collection(
                        date=datetime.strptime(date,
                                               "%Y-%m-%dT%H:%M:%S").date(),
                        t=wasteType["_pickupTypeText"],
                    ))
        return entries
Example No. 12
    def fetch(self):
        response = requests.post(
            "https://vatjanst.lerum.se/FutureWeb/SimpleWastePickup/SearchAdress",
            {"searchText": self._street_address})

        address_data = json.loads(response.text)
        address = None
        if address_data.get("Succeeded") and address_data.get("Buildings"):
            address = address_data["Buildings"][0]

        if not address:
            return []

        query_params = urlencode({"address": address})
        response = requests.get(
            "https://vatjanst.lerum.se/FutureWeb/SimpleWastePickup/GetWastePickupSchedule?{}"
            .format(query_params))
        data = json.loads(response.text)

        entries = []
        for item in data["RhServices"]:
            waste_type = item["WasteType"]
            icon = "mdi:trash-can"
            if waste_type == "Matavfall":
                icon = "mdi:leaf"
            next_pickup = item["NextWastePickup"]
            next_pickup_date = datetime.fromisoformat(next_pickup).date()
            entries.append(
                Collection(date=next_pickup_date, t=waste_type, icon=icon))

        return entries
Example No. 13
    def fetch(self):
        dates = []

        if self._recurrence is not None:
            ruledates = rrule(
                freq=self._recurrence,
                interval=self._interval,
                dtstart=self._start,
                until=self._until,
            )

            for ruleentry in ruledates:
                date = ruleentry.date()

                if self._excludes is not None and date in self._excludes:
                    continue

                dates.append(date)

        if self._dates is not None:
            dates.extend(self._dates)

        # deduplicate, then sort (sorting before set() would have no effect)
        entries = [Collection(date, self._type) for date in sorted(set(dates))]
        return entries
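For reference, `dateutil`'s `rrule` expands a recurrence rule into concrete datetimes, which is what drives the schedule above. A small standalone example with assumed values (every second week for about six weeks):

    from datetime import datetime
    from dateutil.rrule import WEEKLY, rrule

    for entry in rrule(freq=WEEKLY, interval=2,
                       dtstart=datetime(2024, 1, 1),
                       until=datetime(2024, 2, 15)):
        print(entry.date())  # 2024-01-01, 2024-01-15, 2024-01-29, 2024-02-12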
Example No. 14
    def fetch(self):
        entries = []
        session = requests.Session()

        # Find the UPRN based on the postcode and the property name/number
        if self._uprn is None:
            args = {"Postcode": self._postcode}
            r = session.get(SEARCH_URLS["uprn_search"], params=args)
            r.raise_for_status()
            soup = BeautifulSoup(r.text, features="html.parser")
            propertyUprns = soup.find(id="Uprn").find_all("option")
            for match in propertyUprns:
                if match.text.startswith(self._housenumberorname):
                    self._uprn = match["value"]

        # Get the collection days based on the UPRN (either supplied through arguments or searched for above)
        if self._uprn is not None:
            args = {"uprn": self._uprn}
            r = session.get(SEARCH_URLS["collection_search"], params=args)
            r.raise_for_status()
            soup = BeautifulSoup(r.text, features="html.parser")
            for collection in COLLECTIONS:
                d = (soup.find(id=collection.lower()).find_all("span")[-1].text
                     + " " + str(date.today().year))

                entries.append(
                    Collection(
                        datetime.strptime(d, "%d %b %Y").date(),
                        collection,
                    ))

        return entries
Example No. 15
    def fetch(self):

        args = {
            'id': 430,
            'tx_citkoabfall_abfallkalender[strasse]': str(self._street).encode('utf-8'),
            'tx_citkoabfall_abfallkalender[hausnummer]': str(self._house_number).encode('utf-8'),
            'tx_citkoabfall_abfallkalender[abfallarten][0]': 61,
            'tx_citkoabfall_abfallkalender[abfallarten][1]': 60,
            'tx_citkoabfall_abfallkalender[abfallarten][2]': 59,
            'tx_citkoabfall_abfallkalender[abfallarten][3]': 58,
            'tx_citkoabfall_abfallkalender[action]': 'ics',
            'tx_citkoabfall_abfallkalender[controller]': 'FrontendIcs'
        }

        # use '%20' instead of '+' in URL
        # https://stackoverflow.com/questions/21823965/use-20-instead-of-for-space-in-python-query-parameters
        args = urllib.parse.urlencode(args, quote_via=urllib.parse.quote)

        # send GET request
        reply = requests.get(URL, params=args)

        # create calendar from reply
        gcal = icalendar.Calendar.from_ical(reply.text)

        # iterate over events and add to waste collection
        entries = []
        for component in gcal.walk():
            if component.name == "VEVENT":
                type = component.get('summary')
                start_time = component.get('dtstart').dt

                entries.append(Collection(start_time.date(), type))

        return entries
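The quoting workaround referenced in the comment can be seen in isolation: by default `urllib.parse.urlencode` encodes spaces as `+` (via `quote_plus`), while passing `quote_via=urllib.parse.quote` produces `%20`. The street name below is only an example value:

    import urllib.parse

    args = {"strasse": "Lange Straße"}
    print(urllib.parse.urlencode(args))                                # strasse=Lange+Stra%C3%9Fe
    print(urllib.parse.urlencode(args, quote_via=urllib.parse.quote))  # strasse=Lange%20Stra%C3%9Fe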
Example No. 16
    def fetch(self):
        data = {"query": self._street_address}
        response = requests.post(
            "https://www.vasyd.se/api/sitecore/MyPagesApi/BuildingAddressSearch",
            data=data,
        )

        building_data = json.loads(response.text)["items"]
        building_id = None
        if building_data and len(building_data) > 0:
            building_id = building_data[0]["id"]

        if not building_id:
            return []

        data = {"query": building_id, "street": self._street_address}
        response = requests.post(
            "https://www.vasyd.se/api/sitecore/MyPagesApi/WastePickupByAddress",
            data=data,
        )

        data = json.loads(response.text)["items"]

        entries = []
        for item in data:
            waste_type = item["wasteType"]
            icon = "mdi:trash-can"
            if waste_type == "Trädgårdsavfall":
                icon = "mdi:leaf"
            next_pickup = item["nextWastePickup"]
            next_pickup_date = datetime.fromisoformat(next_pickup).date()
            entries.append(Collection(date=next_pickup_date, t=waste_type, icon=icon))

        return entries
Example No. 17
    def handle_starttag(self, tag, attrs):
        if tag == "div":
            d = dict(attrs)
            id = d.get("id", "")
            if id.endswith("HouseholdBlock"):
                self._withinHouseholdDiv = True
            if id.endswith("CommercialBlock"):
                self._withinHouseholdDiv = False

        if self._withinHouseholdDiv:
            s = dict(attrs)
            className = s.get("class", "")
            if tag == "div":
                if className == "links":
                    self._withinRubbishLinks = True
                else:
                    self._withinRubbishLinks = False

            if tag == "span":
                if className.startswith("m-r-1"):
                    self._withinWasteDateSpan = True

                if self._workingWasteDate is not None:
                    if className.startswith(
                            "icon-rubbish") or className.startswith(
                                "icon-recycle"):
                        type = s["class"][5:]  # remove "icon-"
                        self._entries.append(
                            Collection(self._workingWasteDate, type))
Example No. 18
    def _convert(self, data):
        dates = self._ics.convert(data)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Example No. 19
    def fetch(self):
        dates = self._api.get_dates(self._ort, self._strasse, self._hausnummer)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Example No. 20
    def fetch(self):
        r = requests.get(
            url="https://www.kwb-goslar.de/output/options.php",
            params={
                "ModID": "48",
                "call": "ical",
                "pois": self.pois,
            },
            headers={
                "Referer": "https://www.kwb-goslar.de",
            },
        )

        if not r.ok:
            raise Exception(f"Error: failed to fetch url: {r.request.url}")

        dates = self.ics.convert(r.text)

        entries = []
        for d in dates:
            date, waste_type = d
            icon = ICON_MAP.get(waste_type, "mdi:trash-can-outline")
            entries.append(Collection(date=date, t=waste_type, icon=icon))

        return entries
Example No. 21
    def fetch_year(self, year, city, street, types):
        args = {
            "city": city,
            "street": street,
            "year": year,
            "types[]": types,
            "go_ics": "Download",
        }

        # get ics file
        r = requests.get("https://www.abfallkalender-zak.de", params=args)

        # parse ics file
        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            waste_type = d[1]
            next_pickup_date = d[0]

            entries.append(
                Collection(date=next_pickup_date,
                           t=waste_type,
                           icon=self._iconMap.get(waste_type,
                                                  "mdi:trash-can")))

        return entries
Example No. 22
    def fetch(self):
        params = {"address": self._street_address}
        response = requests.get(
            "https://www.sysav.se/api/my-pages/PickupSchedule/findAddress",
            params=params,
        )

        address_data = json.loads(response.text)
        address = None
        if address_data and len(address_data) > 0:
            address = address_data[0]

        if not address:
            return []

        params = {"address": address}
        response = requests.get(
            "https://www.sysav.se/api/my-pages/PickupSchedule/ScheduleForAddress",
            params=params,
        )

        data = json.loads(response.text)

        entries = []
        for item in data:
            waste_type = item["WasteType"]
            icon = "mdi:trash-can"
            if waste_type == "Trädgårdsavfall":
                icon = "mdi:leaf"
            next_pickup = item["NextPickupDate"]
            next_pickup_date = datetime.fromisoformat(next_pickup).date()
            entries.append(
                Collection(date=next_pickup_date, t=waste_type, icon=icon))

        return entries
Example No. 23
    def fetch(self):
        params = {"start": int(time.time()), "end": int(time.time() + 2678400)}
        if self._street_index is not None:
            params["street"] = self._street_index
            response = requests.get("https://www.hygea.be/displaycal.html",
                                    params=params)
        elif self._cp is not None:
            params["street"] = self._cp
            response = requests.get("https://www.hygea.be/displaycalws.html",
                                    params=params)

        if not response.ok:
            return []
        data = json.loads(response.text)

        entries = []
        for day in data:
            date = datetime.datetime.strptime(day["start"],
                                              "%Y-%m-%dT%H:%M:%S%z").date()

            # example for day["className"]: 12  notadded pos136 om multi
            waste_types = set(day["className"].split())
            for abbr, map in WASTE_MAP.items():
                if abbr in waste_types:
                    c = Collection(date=date, t=map["type"], icon=map["icon"])
                    entries.append(c)

        return entries
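To make the `className` handling concrete: the class string from the comment is split into tokens and matched against the keys of `WASTE_MAP`, which is not shown in this listing. The abbreviation and icon below are therefore assumptions, not the real mapping:

    class_name = "12  notadded pos136 om multi"  # sample value from the comment above
    waste_types = set(class_name.split())        # {'12', 'notadded', 'pos136', 'om', 'multi'}

    WASTE_MAP = {"om": {"type": "Organic waste", "icon": "mdi:leaf"}}  # hypothetical subset

    for abbr, info in WASTE_MAP.items():
        if abbr in waste_types:
            print(info["type"], info["icon"])    # Organic waste mdi:leaf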
Example No. 24
    def fetch(self):
        # Get & parse full HTML only on first call to fetch() to map district or street to district_id
        if not self._district_id:
            self._district_id = self.map_district_id(self._district, self._street)

        if not self._district_id:
            raise ValueError("'_district_id' is not set!")

        now = datetime.datetime.now().date()

        r = requests.get(
            URL,
            params={
                "_func": "evList",
                "_mod": "events",
                "ev[start]": str(now),
                "ev[end]": str(now + datetime.timedelta(days=365)),
                "ev[addr]": self._district_id,
            },
        )
        r.raise_for_status()

        entries = []
        for event in r.json()["contents"].values():
            entries.append(
                Collection(
                    datetime.datetime.fromisoformat(event["start"]).date(),
                    event["title"],
                    picture=event.get("thumb", {}).get("url"),
                )
            )

        return entries
Example No. 25
    def fetch(self):
        # get list of suburbs
        r = requests.get(f"{self._url}/suburbs/get")
        data = json.loads(r.text)

        suburbs = {}
        for entry in data:
            suburbs[entry["Suburb"].strip()] = entry["SuburbKey"]

        # check if suburb exists
        if self._suburb not in suburbs:
            _LOGGER.error(f"suburb not found: {self._suburb}")
            return []
        suburbKey = suburbs[self._suburb]

        # get list of streets for selected suburb
        r = requests.get(f"{self._url}/streets/{suburbKey}")
        data = json.loads(r.text)

        streets = {}
        for entry in data:
            streets[entry["Street"].strip()] = entry["StreetKey"]

        # check if street exists
        if self._street not in streets:
            _LOGGER.error(f"street not found: {self._street}")
            return []
        streetKey = streets[self._street]

        # get list of house numbers for selected street
        params = {"streetkey": streetKey, "suburbKey": suburbKey}
        r = requests.get(
            f"{self._url}/properties/GetPropertiesByStreetAndSuburbKey", params=params,
        )
        data = json.loads(r.text)

        houseNos = {}
        for entry in data:
            houseNos[
                str(int(entry["HouseNo"])) + entry.get("HouseSuffix", "").strip()
            ] = entry["PropertyKey"]

        # check if house number exists
        if self._houseNo not in houseNos:
            _LOGGER.error(f"house number not found: {self._houseNo}")
            return []
        propertyKey = houseNos[self._houseNo]

        # get collection schedule
        r = requests.get(f"{self._url}/services/{propertyKey}")
        data = json.loads(r.text)

        entries = []
        for entry in data:
            name = entry["Name"]
            for dateStr in entry["CollectionDays"]:
                date = datetime.datetime.strptime(dateStr, "%Y-%m-%dT%H:%M:%S").date()
                entries.append(Collection(date, name))
        return entries
Example No. 26
    def fetch(self):
        # get token
        params = {"key": self._key, "modus": MODUS_KEY, "waction": "init"}

        r = requests.post("https://api.abfall.io",
                          params=params,
                          headers=HEADERS)

        # add all hidden input fields to form data
        # There is one hidden field which acts as a token:
        # It consists of a UUID key and a UUID value.
        p = HiddenInputParser()
        p.feed(r.text)
        args = p.args

        args["f_id_kommune"] = self._kommune
        args["f_id_strasse"] = self._strasse

        if self._bezirk is not None:
            args["f_id_bezirk"] = self._bezirk

        if self._strasse_hnr is not None:
            args["f_id_strasse_hnr"] = self._strasse_hnr

        for i in range(len(self._abfallarten)):
            args[f"f_id_abfalltyp_{i}"] = self._abfallarten[i]

        args["f_abfallarten_index_max"] = len(self._abfallarten)
        args["f_abfallarten"] = ",".join(
            map(lambda x: str(x), self._abfallarten))

        now = datetime.datetime.now()
        date2 = now.replace(year=now.year + 1)
        args[
            "f_zeitraum"] = f"{now.strftime('%Y%m%d')}-{date2.strftime('%Y%m%d')}"

        params = {
            "key": self._key,
            "modus": MODUS_KEY,
            "waction": "export_ics"
        }

        # get ics file
        r = requests.post("https://api.abfall.io",
                          params=params,
                          data=args,
                          headers=HEADERS)

        # parse ics file
        r.encoding = "utf-8"  # requests doesn't guess the encoding correctly
        ics_file = r.text

        dates = self._ics.convert(ics_file)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
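`HiddenInputParser` itself is not part of these excerpts. Given how it is used (`p.feed(r.text)` followed by reading `p.args` as a dict of form fields), a minimal sketch based on `html.parser.HTMLParser` could look like the following; treat it as an assumption rather than the project's actual implementation:

    from html.parser import HTMLParser

    class HiddenInputParser(HTMLParser):
        """Collect name/value pairs of all <input type="hidden"> fields."""

        def __init__(self):
            super().__init__()
            self._args = {}

        @property
        def args(self):
            return self._args

        def handle_starttag(self, tag, attrs):
            if tag == "input":
                d = dict(attrs)
                if d.get("type", "").lower() == "hidden":
                    self._args[d.get("name")] = d.get("value", "")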
Example No. 27
    def fetch(self):
        session = requests.session()

        r = session.get(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet?SubmitAction=wasteDisposalServices&InFrameMode=TRUE"
        )

        # add all hidden input fields to form data
        p = HiddenInputParser()
        p.feed(r.text)
        args = p.args

        args["Focus"] = "Ort"
        args["SubmitAction"] = "changedEvent"
        args["Ort"] = self._ort
        args["Strasse"] = "HAUSNUMMER"
        args["Hausnummer"] = 0
        r = session.post(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet", data=args
        )

        args["Focus"] = "Strasse"
        args["SubmitAction"] = "changedEvent"
        args["Ort"] = self._ort
        args["Strasse"] = self._strasse
        args["Hausnummer"] = 0
        r = session.post(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet", data=args
        )

        args["Focus"] = "Hausnummer"
        args["SubmitAction"] = "forward"
        args["Ort"] = self._ort
        args["Strasse"] = self._strasse
        args["Hausnummer"] = self._hausnummer
        r = session.post(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet", data=args
        )

        args["ApplicationName"] = "com.athos.kd.udb.AbfuhrTerminModel"
        args["Focus"] = None
        args["IsLastPage"] = "true"
        args["Method"] = "POST"
        args["PageName"] = "Terminliste"
        args["SubmitAction"] = "filedownload_ICAL"
        del args["Ort"]
        del args["Strasse"]
        del args["Hausnummer"]
        r = session.post(
            "https://webudb.udb.at/WasteManagementUDB/WasteManagementServlet", data=args
        )

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries
Example No. 28
    def fetch(self):
        params = {"key": self._address}
        r = requests.get(
            "https://www.belmont.wa.gov.au/api/intramaps/getaddresses",
            params=params)
        r.raise_for_status()
        j = r.json()

        if len(j) == 0:
            raise Exception("address not found")

        if len(j) > 1:
            raise Exception("multiple addresses found")

        params = {"mapkey": j[0]["mapkey"], "dbkey": j[0]["dbkey"]}
        r = requests.get(
            "https://www.belmont.wa.gov.au/api/intramaps/getpropertydetailswithlocalgov",
            params=params,
        )
        r.raise_for_status()
        data = r.json()["data"]

        entries = []

        # get general waste
        date = datetime.datetime.strptime(data["BinDayGeneralWasteFormatted"],
                                          "%Y-%m-%dT%H:%M:%S").date()
        entries.append(
            Collection(
                date=date,
                t="General Waste",
                icon="mdi:trash-can",
            ))

        # get recycling
        date = datetime.datetime.strptime(data["BinDayRecyclingFormatted"],
                                          "%Y-%m-%dT%H:%M:%S").date()
        entries.append(
            Collection(
                date=date,
                t="Recycling",
                icon="mdi:recycle",
            ))

        return entries
Example No. 29
    def fetch(self):
        locationId = 0

        address = "{} {} {} NSW {}".format(self.street_number,
                                           self.street_name, self.suburb,
                                           self.post_code)

        q = requote_uri(str(API_URLS["address_search"]).format(address))

        # Retrieve suburbs
        r = requests.get(q, headers=HEADERS)

        data = json.loads(r.text)

        # Find the ID for our suburb
        for item in data["locations"]:
            locationId = item["Id"]
            break

        if locationId == 0:
            return []

        # Retrieve the upcoming collections for our property
        q = requote_uri(str(API_URLS["collection"]).format(locationId))

        r = requests.get(q, headers=HEADERS)

        data = json.loads(r.text)

        responseContent = data["responseContent"]

        soup = BeautifulSoup(responseContent, "html.parser")
        services = soup.find_all("div", attrs={"class": "service-details"})

        entries = []

        for item in services:
            # test if <div> contains a valid date. If not, it is not a collection item.
            date_text = item.find("span")
            try:
                date = datetime.datetime.strptime(date_text.text,
                                                  "%A%d %b %Y").date()

            except ValueError:
                continue

            waste_type = item.contents[0].strip()

            entries.append(
                Collection(
                    date=date,
                    t=waste_type,
                    icon=ICON_MAP.get(waste_type, "mdi:trash-can"),
                ))

        return entries
Example No. 30
    def fetch(self):
        session = requests.session()

        r = session.get(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            params={
                "SubmitAction": "wasteDisposalServices",
                "InFrameMode": "TRUE"
            },
        )
        r.raise_for_status()
        r.encoding = "utf-8"

        parser = HiddenInputParser()
        parser.feed(r.text)

        args = parser.args
        args["Ort"] = self._city
        args["Strasse"] = self._street
        args["Hausnummer"] = str(self._hnr)
        args["Hausnummerzusatz"] = self._suffix
        args["SubmitAction"] = "CITYCHANGED"
        r = session.post(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            data=args,
        )
        r.raise_for_status()

        args["SubmitAction"] = "forward"
        args["ContainerGewaehltRM"] = "on"
        args["ContainerGewaehltBM"] = "on"
        args["ContainerGewaehltLVP"] = "on"
        args["ContainerGewaehltPA"] = "on"
        args["ContainerGewaehltPrMuell"] = "on"
        r = session.post(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            data=args,
        )
        r.raise_for_status()

        args["ApplicationName"] = "com.athos.kd.rheinhunsrueck.AbfuhrTerminModel"
        args["SubmitAction"] = "filedownload_ICAL"
        r = session.post(
            "https://aao.rh-entsorgung.de/WasteManagementRheinhunsrueck/WasteManagementServlet",
            data=args,
        )
        r.raise_for_status()

        dates = self._ics.convert(r.text)

        entries = []
        for d in dates:
            entries.append(Collection(d[0], d[1]))
        return entries