Example #1
    def update(self, scraper=None):
        scraper = scraper or utils.PyBikesScraper()

        stations = []

        data = json.loads(scraper.request(self.feed_url))
        for station in data:
            # Skip placeholder ("Loldesign") stations, which come with empty coordinates
            if station['googleMapY'] == "" or station['googleMapX'] == "":
                continue

            # In this feed googleMapY carries the longitude and googleMapX the latitude
            longitude = float(station['googleMapY'])
            latitude = float(station['googleMapX'])

            name = station['name']
            free = int(station['available_slots_size'])
            bikes = int(station['unavailable_slots_size'])

            extra = {
                'uid': int(station['id']),
                'open': station['status'] == 'Ativa',
                'number': int(station['station_number']),
                'bike_uids': [bike['id'] for bike in station['bikes']]
            }

            station = BikeShareStation(name, latitude, longitude, bikes, free,
                                       extra)
            stations.append(station)

        if self.bounding_box:
            stations = utils.filter_bounds(stations, None, self.bounding_box)

        self.stations = list(stations)
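
A minimal way to exercise an update() like the one above offline is to inject a stub scraper through the optional scraper argument (`scraper = scraper or utils.PyBikesScraper()` exists precisely so a caller can pass its own). The class name SomeFeedSystem and every field value below are made up; only the field names mirror what the snippet reads:

import json

# Hypothetical stand-in for utils.PyBikesScraper; returns a canned feed
# with the fields the parser above expects.
class StubScraper(object):
    def request(self, url):
        return json.dumps([{
            'id': '42',
            'name': 'Praça Central',
            'status': 'Ativa',
            'station_number': '7',
            'googleMapX': '-23.55',            # latitude in this feed
            'googleMapY': '-46.63',            # longitude in this feed
            'available_slots_size': '10',
            'unavailable_slots_size': '3',
            'bikes': [{'id': 'b1'}, {'id': 'b2'}, {'id': 'b3'}],
        }])

# system = SomeFeedSystem(...)           # hypothetical; the class is not shown above
# system.update(scraper=StubScraper())
# assert system.stations[0].bikes == 3   # unavailable_slots_size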
Example #2
    def update(self, scraper=None):
        scraper = scraper or PyBikesScraper(cache)
        html = scraper.request(self.main_url)
        # Station data is embedded in the page as a JSON blob: siteContent='{...}';
        data_m = re.search(r'siteContent=\'({.+?})\';', html)
        data = json.loads(data_m.group(1))
        filtered_data = filter_bounds(
            data.itervalues(), lambda s: (float(s['lat']), float(s['lng'])),
            *self.city_bounds)
        self.stations = list(map(YouBikeStation, filtered_data))
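
The regex in the two YouBike-style examples (this one and Example #4) pulls a JSON blob that the page embeds as siteContent='{...}';. A self-contained illustration of that round trip; the HTML string and its values are invented:

import json
import re

# Invented stand-in for the page returned by self.main_url.
html = "<script>var siteContent='{\"0001\": {\"lat\": \"25.04\", \"lng\": \"121.56\"}}';</script>"

match = re.search(r'siteContent=\'({.+?})\';', html)
data = json.loads(match.group(1))
print(data['0001']['lat'])  # -> 25.04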
Example #3
    def update(self, scraper=None):
        scraper = scraper or utils.PyBikesScraper()

        stations = []

        data = json.loads(scraper.request(self.feed_url))
        stations = self.get_stations(data)
        if self.bbox:
            stations = utils.filter_bounds(stations, None, self.bbox)
        self.stations = list(stations)
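
The two calling conventions used for filter_bounds in these examples (a None getter over BikeShareStation objects here and in Example #1, and an explicit lambda over raw dicts in Examples #2 and #4) imply a helper of roughly the following shape. This is only a sketch of that implied shape, not the actual pybikes utility; the bound format (a pair of (lat, lng) corners) and the fallback attribute names are assumptions:

def filter_bounds(things, getter, *bounds):
    # Fall back to attribute access when no getter is supplied, which is how
    # the station-object call sites use it (assumed attribute names).
    def default_getter(thing):
        return (thing.latitude, thing.longitude)

    getter = getter or default_getter

    for thing in things:
        lat, lng = getter(thing)
        # Keep the element if it falls inside any of the given boxes,
        # each assumed to be ((min_lat, min_lng), (max_lat, max_lng)).
        if any(min_lat <= lat <= max_lat and min_lng <= lng <= max_lng
               for (min_lat, min_lng), (max_lat, max_lng) in bounds):
            yield thing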
Example #4
    def update(self, scraper=None):
        scraper = scraper or PyBikesScraper(cache)
        html = scraper.request(self.main_url)
        data_m = re.search(r'siteContent=\'({.+?})\';', html)
        data = json.loads(data_m.group(1))
        filtered_data = filter_bounds(
            data.itervalues(),
            lambda s: (float(s['lat']), float(s['lng'])),
            *self.city_bounds
        )
        self.stations = map(YouBikeStation, filtered_data)
Example #5
    def update(self, scraper=None):
        if scraper is None:
            scraper = PyBikesScraper(cache)
        domain_xml = etree.fromstring(
            scraper.request(self.url).encode('utf-8'))
        places = domain_xml.xpath(
            '/markers/country/city[@uid="{uid}"]/place'.format(uid=self.uid))
        # We want to raise an error if a uid is invalid, right?
        assert places, "Not found: uid {!r}, domain {!r}, url {}".format(
            self.uid, self.domain, self.url)
        if self.bbox:

            def getter(place):
                lat, lng = place.attrib['lat'], place.attrib['lng']
                return (float(lat), float(lng))

            places = filter_bounds(places, getter, self.bbox)
        # For now ignore bikes roaming around
        places = filter(lambda p: p.attrib.get('bike', '') != '1', places)

        self.stations = list(map(NextbikeStation, places))
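
The XPath query above selects every place under the city with the matching uid, and the filter() call then drops free-floating bikes flagged with bike="1". A tiny self-contained version of the same selection; the markers document, uid, and coordinates are invented:

from lxml import etree

xml = b"""
<markers>
  <country>
    <city uid="123">
      <place uid="1" lat="52.52" lng="13.40" bikes="5"/>
      <place uid="2" lat="52.53" lng="13.41" bike="1"/>
    </city>
  </country>
</markers>
"""

domain_xml = etree.fromstring(xml)
places = domain_xml.xpath('/markers/country/city[@uid="123"]/place')
# Drop roaming bikes (bike="1"), as the snippet above does
places = [p for p in places if p.attrib.get('bike', '') != '1']
print([p.attrib['uid'] for p in places])  # -> ['1']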
Example #6
    def update(self, scraper=None):
        if scraper is None:
            scraper = PyBikesScraper(cache)
        domain_xml = etree.fromstring(
            scraper.request(self.url).encode('utf-8')
        )
        places = domain_xml.xpath(
            '/markers/country/city[@uid="{uid}"]/place'.format(uid=self.uid)
        )
        # We want to raise an error if a uid is invalid, right?
        assert places, "Not found: uid {!r}, domain {!r}, url {}".format(
            self.uid, self.domain, self.url
        )
        if self.bbox:
            def getter(place):
                lat, lng = place.attrib['lat'], place.attrib['lng']
                return (float(lat), float(lng))
            places = filter_bounds(places, getter, self.bbox)
        # For now ignore bikes roaming around
        places = filter(lambda p: p.attrib.get('bike', '') != '1', places)

        self.stations = map(NextbikeStation, places)
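
One difference worth noting across these examples: #2 and #5 wrap the map() call in list(...), while #4 and #6 assign the bare map() to self.stations. On Python 2 both produce a list; on Python 3 a bare map() is a lazy, single-use iterator, so the list(...) form is the safer choice if stations is read more than once. A minimal illustration, with plain strings standing in for station objects:

stations = map(str.upper, ['a', 'b', 'c'])

print(list(stations))  # -> ['A', 'B', 'C']
print(list(stations))  # -> [] on Python 3: the map object is already exhausted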