    def get(self):
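        """Compare today's particulate readings against every registered Twitter
        alert and queue a tweet for each station whose value exceeds the alert
        threshold."""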
        logging.info("scanning twitter alerts")

        q = ParticulateData.all()
        q.filter("date", date.today())
        results = q.fetch(1000)

        for result in results:
            # Check if there are outstanding alerts

            q2 = TwitterAlert.all()
            q2.filter("stationid", result.stationid)
            alerts = q2.fetch(1000)

            for alert in alerts:
                logging.info("result value %d alert on %d", result.value, alert.amount)

                if result.value > alert.amount:
                    # Send Tweet
                    targetuser = alert.twittername

                    logging.info("tweet to %s", targetuser)

                    # Dutch notification text: "The value for station X will be too
                    # high tomorrow, namely: N, see: <station URL>"
                    updateString = (
                        "@%s De waarde voor station %s is morgen te hoog, nl: %d, zie: http://www.vervuilingsalarm.nl/station/%s/"
                        % (targetuser, result.stationid, result.value, result.stationid)
                    )

                    twitterQueue = taskqueue.Queue("twitterqueue")
                    twitterTask = taskqueue.Task(url="/sendtweets/worker/", params={"update": updateString})
                    twitterQueue.add(twitterTask)
    def get(self, stationid):
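        """Return a JSON document with the last 90 days of readings for a station,
        plus the lat/lon pulled from its cached Pachube environment feed."""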
        response = {}

        q = ParticulateData.all()
        q.filter("stationid =", stationid)
        q.filter("date > ", date.today() - timedelta(90))
        q.order("date")
        results = q.fetch(1000)

        data = []
        for result in results:
            data.append(result.value)

        logging.info("data got for station %s" % stationid)

        response["data"] = data

        # Pachube is not very happy if we hammer them on every index load
        environmentXML = memcache.get("%s_environmentxml" % stationid)

        if environmentXML is None:
            mappingQuery = PachubeMapping.all()
            mappingQuery.filter("stationid =", stationid)
            mapping = mappingQuery.get()

            if mapping:
                environmentURL = "http://www.pachube.com/api/%s.xml?key=%s" % (mapping.pachubeid, PACHUBE_API_KEY)
                logging.info("fetching url: %s", environmentURL)

                environmentGet = urlfetch.fetch(url=environmentURL)

                if environmentGet.status_code == 200 and environmentGet.content:
                    environmentXML = environmentGet.content
                    # Cache the feed XML for a week
                    memcache.set("%s_environmentxml" % stationid, environmentXML, time=7 * 24 * 60 * 60)

        lat = ""
        lon = ""

        logging.debug("environment xml is - %s", environmentXML)

        if environmentXML:
            latMatch = re.search("<lat>(.+?)</lat>", environmentXML)
            lonMatch = re.search("<lon>(.+?)</lon>", environmentXML)

            # Only use the coordinates if the feed actually exposes a location
            if latMatch and lonMatch:
                lat = latMatch.group(1)
                lon = lonMatch.group(1)
                logging.info("got lon %s and lat %s", lon, lat)

        response["lat"] = lat
        response["lon"] = lon

        self.response.headers["Content-Type"] = "text/json"

        self.response.out.write(simplejson.dumps(response))
    def get(self):
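        """Fetch the RIVM PM10 forecast page, parse the station table out of the
        HTML, store each reading and queue a Pachube update per station."""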
        url = "http://www.lml.rivm.nl/data/verwachting/pm10.html"
        result = urlfetch.fetch(url)

        logging.info("parsing data")

        # Collect parsed rows here so the storage loop below still runs (with no
        # rows) if the fetch fails and the parsing block is skipped
        datalines = []

        if result.status_code == 200:
            logging.info("data fetch succeeded")

            # Parse the station table out of the returned HTML
            store = False

            lines = result.content.split("\n")

            for line in lines:
                if "<tr><th>nr<th>naam<th>type<th>conc" in line:
                    # Header row of the forecast table: start collecting rows
                    store = True
                elif "</table>" in line:
                    # End of the table: stop collecting
                    store = False
                elif store:
                    # Data row: drop the leading <tr> cell and keep the <td> cells
                    parts = [part.strip() for part in line.split("<td>")[1:]]
                    datalines.append(parts)

        for line in datalines:
            # Put them in our own database
            logging.info("storing %s and %s in database", line[0], line[3])
            data = ParticulateData(stationid=line[0], value=int(line[3]))
            data.put()

            pachubeQueue = taskqueue.Queue("pachubequeue")
            pachubeTask = taskqueue.Task(
                url="/station/%s/updatepachube/" % line[0],
                params={"stationid": line[0], "stationname": line[1], "value": line[3]},
            )
            pachubeQueue.add(pachubeTask)
    def get(self, stationid):
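        """Render the station detail page with the last 90 days of readings and
        the station's Pachube mapping, if one exists."""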
        q = ParticulateData.all()
        q.filter("stationid =", stationid)
        q.filter("date > ", date.today() - timedelta(90))
        q.order("date")
        results = q.fetch(1000)

        mappingQuery = PachubeMapping.all()
        mappingQuery.filter("stationid =", stationid)
        mapping = mappingQuery.get()

        values = {"values": results, "station": stationid, "mapping": mapping}

        # Add the flash message to the template context if one was passed along
        flash = self.request.get("flash", "")
        if flash:
            values["flash"] = flash

        self.response.headers["Content-Type"] = "text/html"

        templatepath = os.path.join(os.path.dirname(__file__), "templates", "station.html")
        self.response.out.write(template.render(templatepath, values))