def generate_route(trip):
    '''Build a path using Mapzen's Valhalla API.

    `trip` is a dict with a 'mode' and a list of 'points'. Returns a dict
    holding the mode, the encoded polyline, the maneuvers and the length
    (km) of the first leg of the computed route.
    '''
    base = 'http://valhalla.mapzen.com/'
    key = tools.read_json('config/keys.json')['valhalla']
    payload = json.dumps({
        'locations': trip['points'],
        # 'costing': trip['mode'],
        # For some reasons the biking costing is not close to reality
        'costing': 'pedestrian',
        'directions_options': {'units': 'kilometers'}
    })
    url = '{0}route?json={1}&api_key={2}'.format(base, payload, key)
    # No whitespace allowed
    url = url.replace(' ', '')
    path = tools.load_json(get_route(url))
    leg = path['trip']['legs'][0]
    return {
        'mode': trip['mode'],
        'polyline': leg['shape'],
        'maneuvers': leg['maneuvers'],
        'distance': leg['summary']['length']
    }
def apirouting(mode, city, departure, arrival, time, people):
    '''Return a list of routes in polyline format.

    String addresses are not supported because they won't be needed for the
    mobile apps. Indeed the departure will be the same as the arrival
    because the mobile apps do not include full trips.

    Example URL:

    /routing/mode=takeBike&city=Toulouse&
    departure=[43.5639677,1.4655774]&
    arrival=[43.6044328,1.443463]&
    time=[1442935490355]&people=1

    Don't forget to remove all the spaces.
    '''
    # Safe replacement for eval(): the parameters are plain list literals
    # (e.g. "[43.56,1.46]"), and eval on URL-supplied strings would allow
    # arbitrary code execution.
    from ast import literal_eval
    # NOTE(review): `key` is not defined in this function's scope — it is
    # presumably a module-level value or request argument; confirm.
    if key == tools.read_json('config/keys.json')['routing']:
        situation = {
            'city': city,
            'departure': literal_eval(departure),
            'arrival': literal_eval(arrival),
            'time': literal_eval(time),
            'people': int(people)
        }
        if mode == 'fullTrip':
            routes = routing.full_trip(situation)
        elif mode == 'takeBike':
            routes = routing.take_bike(situation)
        elif mode == 'dropBike':
            routes = routing.drop_bike(situation)
        return jsonify({'routes': routes})
    else:
        return jsonify({'error': 'Wrong API key.'})
def stations(city):
    '''Fetch and normalize the bike stations of `city` from JCDecaux.'''
    key = tools.read_json('config/keys.json')['jcdecaux']
    base = 'https://api.jcdecaux.com/vls/v1/'
    url = '{0}stations/?contract={1}&apiKey={2}'.format(base, city, key)
    raw = tools.query_API(url)
    return normalize(tools.load_json(raw))
def add_altitudes(stations, size=50):
    '''Use the Google Maps Elevation API.

    `stations` is a list of dicts with 'name', 'lat' and 'lon' keys;
    `size` is the maximum number of locations the API accepts per request.
    Returns a new list of dicts with an additional 'alt' key holding the
    station's elevation.
    '''
    base = 'https://maps.googleapis.com/maps/api/elevation/json?'
    key = tools.read_json('config/keys.json')['google-elevation']
    altitudes = []
    # Query the API in batches of at most `size` stations. Slice-based
    # batching fixes the original separator bookkeeping, which produced an
    # empty trailing chunk (and a bogus empty API request) whenever
    # len(stations) was an exact multiple of `size`.
    for start in range(0, len(stations), size):
        batch = stations[start:start + size]
        locations = '|'.join('{lat},{lon}'.format(lat=s['lat'], lon=s['lon'])
                             for s in batch)
        url = base + 'locations={0}&key={1}'.format(locations, key)
        request = tools.query_API(url)
        altitudes.extend(tools.load_json(request)['results'])
    # Tidy everything up for database insertion
    return [{
        'name': station['name'],
        'lat': station['lat'],
        'lon': station['lon'],
        'alt': result['elevation']
    } for station, result in zip(stations, altitudes)]
def stations(city):
    '''Fetch and normalize the bike stations from the Keolis (Rennes) API.'''
    # The city parameter is necessary so that everything works
    key = tools.read_json('config/keys.json')['keolis']
    base = 'http://data.keolis-rennes.com/json/?version=1.0&'
    url = '{0}key={1}&cmd=getstation'.format(base, key)
    payload = tools.query_API(url)
    return normalize(tools.load_json(payload))
def stations(city):
    '''Fetch and normalize the bike stations from the Bordeaux (Lacub) WFS API.'''
    # The city parameter is necessary so that everything works
    key = tools.read_json('config/keys.json')['lacub']
    # Bug fix: the '{}' placeholder was never filled in — the original code
    # omitted .format(key), so every request went out with a literal '{}'
    # instead of the API key. Debug prints removed as well.
    url = ('http://data.lacub.fr/wfs?key={}'
           '&SERVICE=WFS&VERSION=1.1.0&'
           'REQUEST=GetFeature'
           '&TYPENAME=CI_VCUB_P&SRSNAME=EPSG:4326').format(key)
    data = tools.query_API(url)
    stations = tools.load_xml(data)
    return normalize(stations)
def compute_distances(departure, stations, mode):
    '''Using the Mapbox Distance API.

    Returns one candidate dict per station: the station's own fields merged
    with the distance information returned by the API (same order).
    '''
    # Interrogate the API to get the distance to each station
    base = 'https://api.mapbox.com/distances/v1/mapbox/'
    key = tools.read_json('config/keys.json')['mapbox-distance']
    # NOTE(review): `coordinates` is built but never attached to the
    # request; the Distance API expects it as a POST body — confirm whether
    # tools.query_API handles this. (Debug print removed.)
    coordinates = {
        'coordinates': [departure] + [station['p'] for station in stations]
    }
    url = '{0}{1}?access_token={2}'.format(base, mode, key)
    data = tools.query_API(url)
    distances = tools.load_json(data)['rows'][0]['elements']
    candidates = []
    for station, distance in zip(stations, distances):
        candidate = dict(station)
        candidate.update(distance)
        candidates.append(candidate)
    return candidates
def compute_distances(departure, stations, mode):
    '''Using the Mapbox Distance API.

    Each returned candidate merges a station's fields with its matching
    distance entry from the API response.
    '''
    # Interrogate the API to get the distance to each station
    base = 'https://api.mapbox.com/distances/v1/mapbox/'
    key = tools.read_json('config/keys.json')['mapbox-distance']
    # NOTE(review): this payload is never sent along with the URL; the
    # Distance API documents it as a POST body — verify tools.query_API.
    # (Leftover debug print removed.)
    coordinates = {
        'coordinates': [departure] + [station['p'] for station in stations]
    }
    url = '{0}{1}?access_token={2}'.format(base, mode, key)
    data = tools.query_API(url)
    distances = tools.load_json(data)['rows'][0]['elements']
    candidates = []
    for station, distance in zip(stations, distances):
        merged = dict(station)
        merged.update(distance)
        candidates.append(merged)
    return candidates
def generate_route(trip):
    '''Build a path using Mapzen's Valhalla API.

    Takes a `trip` dict ('mode' plus a list of 'points') and returns the
    mode, polyline, maneuvers and distance (km) of the route's first leg.
    '''
    base = 'http://valhalla.mapzen.com/'
    key = tools.read_json('config/keys.json')['valhalla']
    body = json.dumps({
        'locations': trip['points'],
        # 'costing': trip['mode'],
        # For some reasons the biking costing is not close to reality
        'costing': 'pedestrian',
        'directions_options': {'units': 'kilometers'}
    })
    # No whitespace allowed
    url = '{0}route?json={1}&api_key={2}'.format(base, body, key).replace(' ', '')
    path = tools.load_json(get_route(url))
    first_leg = path['trip']['legs'][0]
    return {'mode': trip['mode'],
            'polyline': first_leg['shape'],
            'maneuvers': first_leg['maneuvers'],
            'distance': first_leg['summary']['length']}
from flask import Flask, render_template, request, \
    url_for, jsonify, g
# NOTE(review): flask.ext.* is the deprecated extension namespace; modern
# Flask uses `from flask_babel import ...` — confirm the pinned versions.
from flask.ext.babel import Babel, gettext
from htmlmin import minify
from lib import routing
from lib import tools
import json

# Flask application and i18n (Babel) setup
app = Flask(__name__)
babel = Babel(app)

# Static datasets loaded once at import time
cities = tools.read_json('static/cities.json')
names = tools.read_json('static/names.json')
stations = tools.read_json('static/stations.json')
centers = tools.read_json('static/centers.json')
predictions = tools.read_json('static/predictions.json')


# Languages
@app.before_request
def before():
    # Stash the URL's language code on `g` and strip it from view_args so
    # that view functions don't have to accept a `lang_code` parameter.
    if request.view_args and 'lang_code' in request.view_args:
        g.current_lang = request.view_args['lang_code']
        request.view_args.pop('lang_code')


@babel.localeselector
def get_locale():
    # Fall back to English when `before` did not set a language
    return g.get('current_lang', 'en')
from apscheduler.schedulers.background import BackgroundScheduler
from lib.providers import wrapper
from lib.mongo import timeseries
from lib import tools
from datetime import datetime
import time
import json

# Static configuration loaded once at import time
providers = tools.read_json('static/providers.json')
centers = tools.read_json('static/centers.json')
predictions = tools.read_json('static/predictions.json')


def update(provider, city, predict):
    '''Collect fresh station data for `city` and persist it.

    Best effort: if the provider fails, this update round is skipped.
    '''
    # Get the information for the city
    try:
        stations = wrapper.collect(provider, city)
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; the update stays best-effort.
        return
    # Update the database
    if predict == 'Yes':
        timeseries.update_city(stations, city)
    # Save the data for the map
    geojson = tools.json_to_geojson(stations)
    with open('static/geojson/{0}.geojson'.format(city), 'w') as outfile:
        json.dump(geojson, outfile)
    # Tell the server the city's data was updated
    with open('static/updates/{0}.txt'.format(city), 'w') as outfile:
        outfile.write(datetime.now().isoformat())
def apicenters():
    '''Return the list of map centers.

    (Docstring fixed: it previously said "names", copy-pasted from the
    sibling endpoint, while the function returns centers.json.)
    '''
    centers = tools.read_json('static/centers.json')
    return jsonify(centers)
def apiproviders():
    '''Return the list of providers/cities.'''
    return jsonify(tools.read_json('static/providers.json'))
def apicities():
    '''Return the list of countries/cities.'''
    return jsonify(tools.read_json('static/cities.json'))
def apicity_stations(city):
    '''Return the latest geojson file of a city.'''
    geojson_path = 'static/geojson/{}.geojson'.format(city)
    return jsonify(tools.read_json(geojson_path))
def apistations():
    '''Return the list of countries/cities/stations.'''
    return jsonify(tools.read_json('static/stations.json'))
from apscheduler.schedulers.background import BackgroundScheduler
from lib.providers import wrapper
from lib.mongo import timeseries
from lib import tools
from datetime import datetime
import time
import json

# Static configuration loaded once at import time
providers = tools.read_json('static/providers.json')
centers = tools.read_json('static/centers.json')
predictions = tools.read_json('static/predictions.json')


def update(provider, city, predict):
    '''Collect fresh station data for `city` and persist it to disk/DB.'''
    # Get the information for the city
    try:
        stations = wrapper.collect(provider, city)
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
    # consider narrowing to `except Exception`.
    except:
        return
    # Update the database
    if predict == 'Yes':
        timeseries.update_city(stations, city)
    # Save the data for the map
    geojson = tools.json_to_geojson(stations)
    with open('static/geojson/{0}.geojson'.format(city), 'w') as outfile:
        json.dump(geojson, outfile)
    # Tell the server the city's data was updated
    with open('static/updates/{0}.txt'.format(city), 'w') as outfile:
        outfile.write(datetime.now().isoformat())


# The body of this main guard continues beyond this chunk of the file.
if __name__ == '__main__':
def apipredictions():
    '''Return the list of predictions.'''
    return jsonify(tools.read_json('static/predictions.json'))
from apscheduler.schedulers.background import BackgroundScheduler
from lib.mongo import timeseries
from lib.learning import munging
from lib.learning import forest
from lib import tools
import time

# Mapping of city -> stations, loaded once at import time
stationsFile = tools.read_json('static/stations.json')


def learn(city):
    '''Fit the 'bikes' and 'spaces' regressors for every station of `city`.'''
    for station in stationsFile[city]:
        # Get all the data from the database
        dataframe = timeseries.query_station(city, station)
        # Prepare the dataframe for learning
        dataframe = munging.rename_columns(dataframe)
        dataframe = munging.prepare(dataframe)
        # Apply a regressor
        forest.fit(dataframe, 'bikes', city, station)
        forest.fit(dataframe, 'spaces', city, station)
        # timeseries.delete_city(city)


if __name__ == '__main__':
    scheduler = BackgroundScheduler()
    for city in stationsFile.keys():
        # Train once immediately, then schedule periodic retraining
        learn(city)
        # This call's argument list continues beyond this chunk of the file.
        scheduler.add_job(learn, 'interval', weeks=2, args=[city],
# `parser` (an argument parser) is created earlier in the file, before
# this chunk begins.
parser.add_argument('country')
parser.add_argument('countryRealName')
parser.add_argument('predict')

# Parse the command line arguments
parameters = parser.parse_args()
option = parameters.option
provider = parameters.provider
city = parameters.city
cityRealName = parameters.cityRealName
country = parameters.country
countryRealName = parameters.countryRealName
predict = parameters.predict

# Load the files
stationsFile = tools.read_json('static/stations.json')
providersFile = tools.read_json('static/providers.json')
centersFile = tools.read_json('static/centers.json')
citiesFile = tools.read_json('static/cities.json')
namesFile = tools.read_json('static/names.json')
predictionsFile = tools.read_json('static/predictions.json')

if option in ('add', 'insert'):
    geo.add_city(city)
    # Get the current information for a city
    stations = wrapper.collect(provider, city)
    # Add the altitudes of every station
    stations = geography.add_altitudes(stations)
    # Extract latitudes, longitudes and station names
    # (this branch continues beyond this chunk of the file)
    latitudes = []
    longitudes = []
from apscheduler.schedulers.background import BackgroundScheduler
from lib.mongo import timeseries
from lib.learning import munging
from lib.learning import forest
from lib import tools
import time

# Mapping of city -> stations, loaded once at import time
stationsFile = tools.read_json('static/stations.json')


def learn(city):
    '''Fit the 'bikes' and 'spaces' regressors for every station of `city`.'''
    for station in stationsFile[city]:
        # Get all the data from the database
        dataframe = timeseries.query_station(city, station)
        # Prepare the dataframe for learning
        dataframe = munging.rename_columns(dataframe)
        dataframe = munging.prepare(dataframe)
        # Apply a regressor
        forest.fit(dataframe, 'bikes', city, station)
        forest.fit(dataframe, 'spaces', city, station)
        # timeseries.delete_city(city)


if __name__ == '__main__':
    scheduler = BackgroundScheduler()
    for city in stationsFile.keys():
        # Train once immediately, then schedule periodic retraining
        learn(city)
        scheduler.add_job(learn, 'interval', weeks=2, args=[city],
                          misfire_grace_time=60 * 60 * 24 * 7,
                          coalesce=True)
    scheduler.start()
    # Keep the main thread alive so the background scheduler keeps running.
    # Bug fix: the original slept for 10e-1000000, which underflows to 0.0,
    # turning this loop into a 100%-CPU busy-spin.
    while True:
        time.sleep(60)
def apinames():
    '''Return the list of names.'''
    return jsonify(tools.read_json('static/names.json'))