Exemplo n.º 1
0
def parseOsmTAGRelation():
    """Fetch the TAG network relation from Overpass and serialize its lines.

    Queries Overpass for the sub-relations of the network relation, wraps
    each <relation> element in an OsmLine, and returns the JSON dump
    (string) produced with OsmLineEncoder. Prints it when `verbose` is set.
    """
    networkId = '3921495'
    url = ("http://overpass-api.de/api/interpreter?data=relation%28"
           + networkId + "%29%3Brel%28r%29%3Bout%20body%3B%0A")
    page = localPageCache.getPage(url)

    # Parse the Overpass XML answer for the network relation.
    dom = BeautifulSoup(page)
    relationTags = dom.findAll("relation")

    # Terminal size was used by a since-disabled progress bar; the call is
    # kept so behavior is unchanged.
    (termWidth, height) = console.getTerminalSize()

    # One OsmLine per <relation>; each object parses its own line data.
    lines = [OsmLine(tag) for tag in relationTags]

    jsonOutput = json.dumps(lines, indent=4, sort_keys=True, cls=OsmLineEncoder)

    if verbose:
        print(jsonOutput)

    return jsonOutput
Exemplo n.º 2
0
    def locateStation(self):
        """Reverse-geocode self.location and set self.city / self.road.

        Queries the MapQuest-hosted Nominatim reverse endpoint with the
        station's latitude/longitude. On success fills `self.city` from the
        first matching place key and `self.road` from the first matching
        road-like key. On failure the marker strings used elsewhere in the
        file ("HORRIBLE ERROR" / "HORRIBLE ERROR2") are stored instead.
        """
        nominatimUrl = "http://open.mapquestapi.com/nominatim/v1/reverse.php?key=NpfVO4ocnBw3PfHSrVCqpGeLzyy4F515&osm_type=N&accept-language=fr&format=json&&lat=" + str(self.location.latitude) + "&lon=" + str(self.location.longitude)
        nominatimData = localPageCache.getPage(nominatimUrl, True)
        print(".", end="")  # progress marker, one dot per station
        try:
            nominatimJson = json.loads(nominatimData)
        except ValueError:
            # Bugfix: nominatimJson stayed None here and the `in` test below
            # raised TypeError; treat unparsable JSON like a missing address.
            print('----------------- ', nominatimUrl)
            self.city = "HORRIBLE ERROR"
            print(nominatimUrl, " :/ ")
            return

        if "address" in nominatimJson:
            address = nominatimJson["address"]
            # City: first match among decreasingly specific place types.
            for placeKey in ("city", "village", "town", "hamlet"):
                if placeKey in address:
                    self.city = address[placeKey]
                    break
            else:
                print(nominatimUrl, " :// ")
                self.city = "HORRIBLE ERROR2"

            # Road: first match among road-like types, else empty string.
            for roadKey in ("road", "pedestrian", "footway"):
                if roadKey in address:
                    self.road = address[roadKey]
                    break
            else:
                self.road = ""
        else:
            self.city = "HORRIBLE ERROR"
            print(nominatimUrl, " :/ ")
Exemplo n.º 3
0
def parseFichesHoraire():
    """Count the timetable-sheet entries listed on the TAG web page."""
    page = localPageCache.getPage("http://www.tag.fr/180-fiches-horaires.htm")
    dom = BeautifulSoup(page)

    # The sheets are <li class="item"> entries inside <ul class="liste">.
    itemList = dom.find("ul", "liste").findAll("li", "item")

    print(len(itemList))
Exemplo n.º 4
0
def parseFichesHoraire():
    """Count the timetable-sheet entries listed on the TAG web page."""
    page = localPageCache.getPage("http://www.tag.fr/180-fiches-horaires.htm")
    dom = BeautifulSoup(page)

    # The sheets are <li class="item"> entries inside <ul class="liste">.
    itemList = dom.find("ul", "liste").findAll("li", "item")

    print(len(itemList))
Exemplo n.º 5
0
    def testStationSum(self, directionSet):
        """Return the number of distinct stop-node ids across all directions.

        Fetches each direction relation from the base OSM API and collects
        the "ref" attribute of every member matching `member_role_stop`.
        """
        stationIds = set()
        for direction in directionSet:
            page = localPageCache.getPage(
                "http://api.openstreetmap.org/api/0.6/relation/"
                + str(direction.id))
            stops = BeautifulSoup(page).findAll(member_role_stop)
            stationIds.update(int(stop["ref"]) for stop in stops)
        return len(stationIds)
Exemplo n.º 6
0
def getPointArrets():
    """Fill the global `stations` dict, from the JSON cache or the live API.

    When MMStations.json exists, entries are keyed by their top-level "id";
    otherwise the Metromobilite API is queried, entries are keyed by
    properties.id and the result is written back to the cache file.
    """
    global lines, stations
    cacheFile = 'MMStations.json'
    if os.path.isfile(cacheFile):
        with open(cacheFile, 'r') as fp:
            cached = json.load(fp)
        stations = {entry["id"]: Station(entry, lines) for entry in cached.values()}
    else:
        body = localPageCache.getPage(
            "http://data.metromobilite.fr/api/bbox/json?types=pointArret")
        features = json.loads(body)["features"]
        stations = {f["properties"]["id"]: Station(f, lines) for f in features}
        with open(cacheFile, 'w') as fp:
            json.dump(stations, fp, indent=2, sort_keys=True,
                      cls=StationJSONEncoder)
Exemplo n.º 7
0
    def testStationSum(self, directionSet):
        """Return the number of distinct stop-node ids across all directions.

        Fetches each direction relation from the base OSM API and collects
        the "ref" attribute of every member matching `member_role_stop`.
        """
        stationIds = set()
        for direction in directionSet:
            page = localPageCache.getPage(
                "http://api.openstreetmap.org/api/0.6/relation/"
                + str(direction.id))
            stops = BeautifulSoup(page).findAll(member_role_stop)
            stationIds.update(int(stop["ref"]) for stop in stops)
        return len(stationIds)
Exemplo n.º 8
0
    def fetchStations(self):
        """Append this relation's ordered stop-node ids to self.__stations.

        Overpass does not return an ordered detailed member list, so the
        base OSM API is queried instead. Each stop's OSM node id is stored,
        and a message is printed when `ots` does not know the station.
        """
        page = localPageCache.getPage(
            "http://api.openstreetmap.org/api/0.6/relation/" + str(self.id))
        stopMembers = BeautifulSoup(page).findAll(member_role_stop)

        for member in stopMembers:
            stationId = int(member["ref"])
            # Only the OSM node id of the station is kept.
            self.__stations.append(stationId)
            if not ots.hasStation(stationId):
                print("Error : ", stationId, "not present")
Exemplo n.º 9
0
    def fetchStations(self):
        """Append this relation's ordered stop-node ids to self.__stations.

        Overpass does not return an ordered detailed member list, so the
        base OSM API is queried instead. Each stop's OSM node id is stored,
        and a message is printed when `ots` does not know the station.
        """
        page = localPageCache.getPage(
            "http://api.openstreetmap.org/api/0.6/relation/" + str(self.id))
        stopMembers = BeautifulSoup(page).findAll(member_role_stop)

        for member in stopMembers:
            stationId = int(member["ref"])
            # Only the OSM node id of the station is kept.
            self.__stations.append(stationId)
            if not ots.hasStation(stationId):
                print("Error : ", stationId, "not present")
Exemplo n.º 10
0
def getPointArrets():
    """Fill the global `stations` dict, from the JSON cache or the live API.

    When MMStations.json exists, entries are keyed by their top-level "id";
    otherwise the Metromobilite API is queried, entries are keyed by
    properties.id and the result is written back to the cache file.
    """
    global lines, stations
    cacheFile = 'MMStations.json'
    if os.path.isfile(cacheFile):
        with open(cacheFile, 'r') as fp:
            cached = json.load(fp)
        stations = {entry["id"]: Station(entry, lines) for entry in cached.values()}
    else:
        body = localPageCache.getPage(
            "http://data.metromobilite.fr/api/bbox/json?types=pointArret")
        features = json.loads(body)["features"]
        stations = {f["properties"]["id"]: Station(f, lines) for f in features}
        with open(cacheFile, 'w') as fp:
            json.dump(stations, fp, indent=2, sort_keys=True,
                      cls=StationJSONEncoder)
Exemplo n.º 11
0
def findLineIds():
    """Scrape the Mobitrans line index page and return the list of lines.

    Returns a list of dicts with keys 'name' and 'lineID'. The line name
    comes from the second <img>'s alt text when the entry has two images,
    or from the <span> text when it has one; other entries are reported
    with 'PROBLEM' and skipped. Prints the JSON dump when `verbose` is set.
    """
    url = "http://tag.mobitrans.fr/horaires/index.asp?rub_code=23&keywords=e"
    s = localPageCache.getPage(url)
    soup = BeautifulSoup(s)
    rubDiv = soup.find("div", class_="rub_content")

    ulLignes = rubDiv.find("ul", class_="lig")
    lignes = ulLignes.findAll("li", recursive=False)
    lines = list()

    for ligne in lignes:
        imgs = ligne.findAll("img")
        # Bugfix: was `len(...) is 2` / `is 1` — identity comparison on
        # ints only works through CPython small-int caching; use ==.
        if ligne.find("img") is not None and len(imgs) == 2:
            name = imgs[1]['alt']
        elif ligne.find("img") is not None and len(imgs) == 1:
            name = ligne.find('span').text
        else:
            print('PROBLEM')
            continue

        # Both branches built the same dict; do it once.
        aLine = dict()
        aLine['name'] = name
        url = ligne.find("a").attrs['href']
        parsed = parse_qs(urlparse(url).query, keep_blank_values=True)
        aLine['lineID'] = int(parsed["lign_id"][0])
        lines.append(aLine)

    if verbose:
        print(json.dumps(lines, indent=4, sort_keys=True))

    return lines
Exemplo n.º 12
0
def stationsForLine(lineID, sens):
    """Return the ordered stop list for a line/direction pair.

    Results are memoized in the module-level _mbtStations dict (keyed by
    str(lineID), then "sens<sens>") so each page is fetched only once.
    Each stop is a dict with keys 'name' and 'stationID'. An empty list is
    returned (and not cached) when the page has no stop list.
    """
    lineKey = str(lineID)
    sensKey = "sens" + str(sens)

    # Serve from the in-memory cache when available.
    if lineKey in _mbtStations and sensKey in _mbtStations[lineKey]:
        return _mbtStations[lineKey][sensKey]

    url = ("http://tag.mobitrans.fr/horaires/index.asp?rub_code=23"
           "&typeSearch=line&lign_id=" + str(lineID) + "&sens=" + str(sens))
    page = localPageCache.getPage(url)

    # BeautifulSoup parses the DOM of the timetable page.
    rubDiv = BeautifulSoup(page).find("div", class_="rub_content")
    ulStops = rubDiv.find("ul", class_="stops")
    if not ulStops:
        return list()

    lineStops = list()
    for stopItem in ulStops.findAll("li", recursive=False):
        href = stopItem.find("a").attrs['href']
        params = parse_qs(urlparse(href).query, keep_blank_values=True)
        lineStops.append({
            'name': stopItem.next.string,
            'stationID': int(params["pa_id"][0]),
        })

    _mbtStations.setdefault(lineKey, dict())[sensKey] = lineStops
    return lineStops
Exemplo n.º 13
0
def findLineIds():
    """Scrape the Mobitrans line index page and return the list of lines.

    Returns a list of dicts with keys 'name' and 'lineID'. The line name
    comes from the second <img>'s alt text when the entry has two images,
    or from the <span> text when it has one; other entries are reported
    with 'PROBLEM' and skipped. Prints the JSON dump when `verbose` is set.
    """
    url = "http://tag.mobitrans.fr/horaires/index.asp?rub_code=23&keywords=e"
    s = localPageCache.getPage(url)
    soup = BeautifulSoup(s)
    rubDiv = soup.find("div", class_="rub_content")

    ulLignes = rubDiv.find("ul", class_="lig")
    lignes = ulLignes.findAll("li", recursive=False)
    lines = list()

    for ligne in lignes:
        imgs = ligne.findAll("img")
        # Bugfix: was `len(...) is 2` / `is 1` — identity comparison on
        # ints only works through CPython small-int caching; use ==.
        if ligne.find("img") is not None and len(imgs) == 2:
            name = imgs[1]['alt']
        elif ligne.find("img") is not None and len(imgs) == 1:
            name = ligne.find('span').text
        else:
            print('PROBLEM')
            continue

        # Both branches built the same dict; do it once.
        aLine = dict()
        aLine['name'] = name
        url = ligne.find("a").attrs['href']
        parsed = parse_qs(urlparse(url).query, keep_blank_values=True)
        aLine['lineID'] = int(parsed["lign_id"][0])
        lines.append(aLine)

    if verbose:
        print(json.dumps(lines, indent=4, sort_keys=True))

    return lines
Exemplo n.º 14
0
def locateStations():
    """Reverse-geocode every entry of _stations, filling 'city' and 'road'.

    Queries Nominatim per station node id; a dot is printed per station as
    progress. Stations whose response is not valid JSON are reported and
    skipped. Missing address data is marked with the "HORRIBLE ERROR" /
    "HORRIBLE ERROR2" strings used elsewhere in the file.
    """
    for key in _stations:
        sys.stdout.write('.')
        sys.stdout.flush()
        aStation = _stations[key]
        nominatimUrl = "http://nominatim.openstreetmap.org/reverse?format=json&osm_type=N&accept-language=fr&osm_id=" + str(aStation["id"])
        nominatimData = localPageCache.getPage(nominatimUrl)

        try:
            nominatimJson = json.loads(nominatimData)
        except ValueError:
            # Bugfix: nominatimJson was unbound after a parse failure, so
            # the `in` test below raised NameError; skip this station.
            print('----------------- ', nominatimUrl)
            continue

        if "address" in nominatimJson:
            address = nominatimJson["address"]
            # City: first match among decreasingly specific place types.
            for placeKey in ("city", "village", "town", "hamlet"):
                if placeKey in address:
                    aStation["city"] = address[placeKey]
                    break
            else:
                aStation["city"] = "HORRIBLE ERROR2"

            # Road: first match among road-like types, else empty string.
            for roadKey in ("road", "pedestrian", "footway"):
                if roadKey in address:
                    aStation['road'] = address[roadKey]
                    break
            else:
                aStation['road'] = ""
        else:
            aStation["city"] = "HORRIBLE ERROR"
    print('')
Exemplo n.º 15
0
def locateStations():
    """Reverse-geocode every entry of _stations, filling 'city' and 'road'.

    Queries Nominatim per station node id; a dot is printed per station as
    progress. Stations whose response is not valid JSON are reported and
    skipped. Missing address data is marked with the "HORRIBLE ERROR" /
    "HORRIBLE ERROR2" strings used elsewhere in the file.
    """
    for key in _stations:
        sys.stdout.write('.')
        sys.stdout.flush()
        aStation = _stations[key]
        nominatimUrl = "http://nominatim.openstreetmap.org/reverse?format=json&osm_type=N&accept-language=fr&osm_id=" + str(aStation["id"])
        nominatimData = localPageCache.getPage(nominatimUrl)

        try:
            nominatimJson = json.loads(nominatimData)
        except ValueError:
            # Bugfix: nominatimJson was unbound after a parse failure, so
            # the `in` test below raised NameError; skip this station.
            print('----------------- ', nominatimUrl)
            continue

        if "address" in nominatimJson:
            address = nominatimJson["address"]
            # City: first match among decreasingly specific place types.
            for placeKey in ("city", "village", "town", "hamlet"):
                if placeKey in address:
                    aStation["city"] = address[placeKey]
                    break
            else:
                aStation["city"] = "HORRIBLE ERROR2"

            # Road: first match among road-like types, else empty string.
            for roadKey in ("road", "pedestrian", "footway"):
                if roadKey in address:
                    aStation['road'] = address[roadKey]
                    break
            else:
                aStation['road'] = ""
        else:
            aStation["city"] = "HORRIBLE ERROR"
    print('')
Exemplo n.º 16
0
def stationsForLine(lineID, sens):
    """Return the ordered stop list for a line/direction pair.

    Results are memoized in the module-level _mbtStations dict (keyed by
    str(lineID), then "sens<sens>") so each page is fetched only once.
    Each stop is a dict with keys 'name' and 'stationID'. An empty list is
    returned (and not cached) when the page has no stop list.
    """
    lineKey = str(lineID)
    sensKey = "sens" + str(sens)

    # Serve from the in-memory cache when available.
    if lineKey in _mbtStations and sensKey in _mbtStations[lineKey]:
        return _mbtStations[lineKey][sensKey]

    url = ("http://tag.mobitrans.fr/horaires/index.asp?rub_code=23"
           "&typeSearch=line&lign_id=" + str(lineID) + "&sens=" + str(sens))
    page = localPageCache.getPage(url)

    # BeautifulSoup parses the DOM of the timetable page.
    rubDiv = BeautifulSoup(page).find("div", class_="rub_content")
    ulStops = rubDiv.find("ul", class_="stops")
    if not ulStops:
        return list()

    lineStops = list()
    for stopItem in ulStops.findAll("li", recursive=False):
        href = stopItem.find("a").attrs['href']
        params = parse_qs(urlparse(href).query, keep_blank_values=True)
        lineStops.append({
            'name': stopItem.next.string,
            'stationID': int(params["pa_id"][0]),
        })

    _mbtStations.setdefault(lineKey, dict())[sensKey] = lineStops
    return lineStops
Exemplo n.º 17
0
def parseOsmTAGRelation():
    """Fetch the TAG network relation from Overpass and serialize its lines.

    Queries Overpass for the sub-relations of the network relation, wraps
    each <relation> element in an OsmLine, and returns the JSON dump
    (string) produced with OsmLineEncoder. Prints it when `verbose` is set.
    """
    networkId = '3921495'
    url = ("http://overpass-api.de/api/interpreter?data=relation%28"
           + networkId + "%29%3Brel%28r%29%3Bout%20body%3B%0A")
    page = localPageCache.getPage(url)

    # Parse the Overpass XML answer for the network relation.
    dom = BeautifulSoup(page)
    relationTags = dom.findAll("relation")

    # Terminal size was used by a since-disabled progress bar; the call is
    # kept so behavior is unchanged.
    (termWidth, height) = console.getTerminalSize()

    # One OsmLine per <relation>; each object parses its own line data.
    lines = [OsmLine(tag) for tag in relationTags]

    jsonOutput = json.dumps(lines, indent=4, sort_keys=True, cls=OsmLineEncoder)

    if verbose:
        print(jsonOutput)

    return jsonOutput
Exemplo n.º 18
0
    def locateStation(self):
        """Reverse-geocode self.location and set self.city / self.road.

        Queries the MapQuest-hosted Nominatim reverse endpoint with the
        station's latitude/longitude. On success fills `self.city` from the
        first matching place key and `self.road` from the first matching
        road-like key. On failure the marker strings used elsewhere in the
        file ("HORRIBLE ERROR" / "HORRIBLE ERROR2") are stored instead.
        """
        nominatimUrl = "http://open.mapquestapi.com/nominatim/v1/reverse.php?key=NpfVO4ocnBw3PfHSrVCqpGeLzyy4F515&osm_type=N&accept-language=fr&format=json&&lat=" + str(
            self.location.latitude) + "&lon=" + str(self.location.longitude)
        nominatimData = localPageCache.getPage(nominatimUrl, True)
        print(".", end="")  # progress marker, one dot per station
        try:
            nominatimJson = json.loads(nominatimData)
        except ValueError:
            # Bugfix: nominatimJson stayed None here and the `in` test below
            # raised TypeError; treat unparsable JSON like a missing address.
            print('----------------- ', nominatimUrl)
            self.city = "HORRIBLE ERROR"
            print(nominatimUrl, " :/ ")
            return

        if "address" in nominatimJson:
            address = nominatimJson["address"]
            # City: first match among decreasingly specific place types.
            for placeKey in ("city", "village", "town", "hamlet"):
                if placeKey in address:
                    self.city = address[placeKey]
                    break
            else:
                print(nominatimUrl, " :// ")
                self.city = "HORRIBLE ERROR2"

            # Road: first match among road-like types, else empty string.
            for roadKey in ("road", "pedestrian", "footway"):
                if roadKey in address:
                    self.road = address[roadKey]
                    break
            else:
                self.road = ""
        else:
            self.city = "HORRIBLE ERROR"
            print(nominatimUrl, " :/ ")
Exemplo n.º 19
0
def groupWays(relationId, lineName):
    """Group a line relation's ways into shared / non-shared sections.

    For each direction sub-relation of ``relationId``, consecutive ways
    with the same sharing status (served by more than one line or not,
    per OsmWay.isShared()) are merged into one groupedWay.
    Side effects: fills the module-level nodesDict, waysDict and subRoutes.
    Returns a list with one entry per direction, each a list of groupedWay
    objects. NOTE(review): assumes `overpass`, `nodesDict`, `waysDict`,
    `subRoutes`, `OsmNode`, `OsmWay`, `groupedWay` exist at module level.
    """

    # Retrieving the directions for a line
    lineId = "http://api.openstreetmap.org/api/0.6/relation/" + relationId
    s = localPageCache.getPage(lineId)
    soup = BeautifulSoup(s)
    members = soup.findAll("member")
    directionId = [x["ref"] for x in members]

    # Overpass query: the relation, its sub-relations, their "forward" ways,
    # then every node of those ways (skeleton output).
    query = """
    <osm-script>
      <id-query type="relation" ref=" """ + relationId + """ "/>
      <recurse type="relation-relation"/>

      <recurse type="relation-way" role="forward" />

     <print mode="body"/>
      <recurse type="down" />
      <print mode="skeleton" order="quadtile"/>
    </osm-script>
    """

    s = overpass.query(query)
    soup = BeautifulSoup(s)

    nodeNodes = soup.findAll("node")

    for aNode in nodeNodes:
        nodeId = aNode["id"]  # Is an number but stored as string
        nodesDict[nodeId] = OsmNode(aNode)

    # Parsing the ways
    wayNodes = soup.findAll("way")
    for aWay in wayNodes:
        wayId = aWay["id"]  # Is an number but stored as string
        waysDict[wayId] = OsmWay(aWay, lineName)

    directionsGroupedWays = list()  # ordered groupedWays for each direction

    # For each direction
    for aRelationId in directionId:

        groupedWays = list()  # the global way with every node of the direction
        osmApiQuery = "http://api.openstreetmap.org/api/0.6/relation/" + aRelationId

        s = localPageCache.getPage(osmApiQuery)
        soup = BeautifulSoup(s)

        members = soup.findAll("member", role=re.compile("forward"))

        ways = list()
        for aMember in members :
            ways.append(aMember["ref"])

        subWay =  list()
        shared = len(waysDict[ways[0]].lines) > 1  # wheter the lines starts shared or not.
        previous = None


        # Merging consecutive ways which have same caracteristicts (shared or not shared)
        for index, aWay in enumerate(ways):
            # Todo : If merging back into the same pair of lines => averaging the section
            if shared != waysDict[aWay].isShared() :
                # Sharing status flipped: close the current section using the
                # lines of the *previous* way, then start a fresh one.
                mySubWay = groupedWay(subWay, waysDict[previous].lines)

                groupedWays.append(mySubWay)
                subRoutes[mySubWay.id] = (mySubWay, )

                subWay = list()
                shared = not shared
            subWay.extend(waysDict[aWay].nodesList)
            previous = aWay

        # Close the trailing section (last run of ways in this direction).
        mySubWay = groupedWay(subWay, waysDict[aWay].lines)

        groupedWays.append(mySubWay)
        subRoutes[mySubWay.id] = (mySubWay, )

        directionsGroupedWays.append(groupedWays)

        # if groupedWays[-1].lines == ['C'] and groupedWays[-2].lines == ['B','C'] :
        #     del groupedWays[-1]

    return directionsGroupedWays
Exemplo n.º 20
0
        stationNode.set("id", str(aStation["id"]))
        stationNode.set("city", aStation["city"])
        stationNode.set("road", aStation['road'])
        stationNode.set("accessible", str(aStation["accessible"]))
    tree = ET.ElementTree(root)

    tree.write("stations.xml", pretty_print=True, encoding="utf-8", xml_declaration=True)


def stations():
    """Return the module-level _stations dict built by the loading script."""
    return _stations

print("Loading OSM stations")

_networkId = '3921495'
# Overpass query: the network relation, its sub-relations and their member
# nodes (URL-encoded OverpassQL).
_s = localPageCache.getPage("http://overpass-api.de/api/interpreter?data=relation%28" + _networkId + "%29%3Brel%28r%29%3Brel%28r%29%3Bnode%28r%29%3Bout%3B")

# Module state filled by the calls below.
_xml = BeautifulSoup(_s)
_stations = dict()
_relatedStations = dict()
_soloStations = list()
parseStations()  # inits soloStations & relatedStations

locateStations()

printXml()

# Persist the located stations for later runs.
with open('stations.json', 'w') as fp:
    json.dump(_stations, fp, indent=2, sort_keys=True)

Exemplo n.º 21
0
def groupWays(relationId, lineName):
    """Group a line relation's ways into shared / non-shared sections.

    For each direction sub-relation of ``relationId``, consecutive ways
    with the same sharing status (served by more than one line or not,
    per OsmWay.isShared()) are merged into one groupedWay.
    Side effects: fills the module-level nodesDict, waysDict and subRoutes.
    Returns a list with one entry per direction, each a list of groupedWay
    objects. NOTE(review): assumes `overpass`, `nodesDict`, `waysDict`,
    `subRoutes`, `OsmNode`, `OsmWay`, `groupedWay` exist at module level.
    """

    # Retrieving the directions for a line
    lineId = "http://api.openstreetmap.org/api/0.6/relation/" + relationId
    s = localPageCache.getPage(lineId)
    soup = BeautifulSoup(s)
    members = soup.findAll("member")
    directionId = [x["ref"] for x in members]

    # Overpass query: the relation, its sub-relations, their "forward" ways,
    # then every node of those ways (skeleton output).
    query = """
    <osm-script>
      <id-query type="relation" ref=" """ + relationId + """ "/>
      <recurse type="relation-relation"/>

      <recurse type="relation-way" role="forward" />

     <print mode="body"/>
      <recurse type="down" />
      <print mode="skeleton" order="quadtile"/>
    </osm-script>
    """

    s = overpass.query(query)
    soup = BeautifulSoup(s)

    nodeNodes = soup.findAll("node")

    for aNode in nodeNodes:
        nodeId = aNode["id"]  # Is an number but stored as string
        nodesDict[nodeId] = OsmNode(aNode)

    # Parsing the ways
    wayNodes = soup.findAll("way")
    for aWay in wayNodes:
        wayId = aWay["id"]  # Is an number but stored as string
        waysDict[wayId] = OsmWay(aWay, lineName)

    directionsGroupedWays = list()  # ordered groupedWays for each direction

    # For each direction
    for aRelationId in directionId:

        groupedWays = list()  # the global way with every node of the direction
        osmApiQuery = "http://api.openstreetmap.org/api/0.6/relation/" + aRelationId

        s = localPageCache.getPage(osmApiQuery)
        soup = BeautifulSoup(s)

        members = soup.findAll("member", role=re.compile("forward"))

        ways = list()
        for aMember in members:
            ways.append(aMember["ref"])

        subWay = list()
        shared = len(waysDict[
            ways[0]].lines) > 1  # wheter the lines starts shared or not.
        previous = None

        # Merging consecutive ways which have same caracteristicts (shared or not shared)
        for index, aWay in enumerate(ways):
            # Todo : If merging back into the same pair of lines => averaging the section
            if shared != waysDict[aWay].isShared():
                # Sharing status flipped: close the current section using the
                # lines of the *previous* way, then start a fresh one.
                mySubWay = groupedWay(subWay, waysDict[previous].lines)

                groupedWays.append(mySubWay)
                subRoutes[mySubWay.id] = (mySubWay, )

                subWay = list()
                shared = not shared
            subWay.extend(waysDict[aWay].nodesList)
            previous = aWay

        # Close the trailing section (last run of ways in this direction).
        mySubWay = groupedWay(subWay, waysDict[aWay].lines)

        groupedWays.append(mySubWay)
        subRoutes[mySubWay.id] = (mySubWay, )

        directionsGroupedWays.append(groupedWays)

        # if groupedWays[-1].lines == ['C'] and groupedWays[-2].lines == ['B','C'] :
        #     del groupedWays[-1]

    return directionsGroupedWays
Exemplo n.º 22
0
def getLines():
    """Populate the global `lines` dict from the Metromobilite routes API.

    Keys are route ids with ':' replaced by '_'; values are Line objects.
    """
    global lines
    body = localPageCache.getPage(
        "http://data.metromobilite.fr/api/routers/default/index/routes")
    lines = {route["id"].replace(":", "_"): Line(route)
             for route in json.loads(body)}
Exemplo n.º 23
0
def getLines():
    """Populate the global `lines` dict from the Metromobilite routes API.

    Keys are route ids with ':' replaced by '_'; values are Line objects.
    """
    global lines
    body = localPageCache.getPage(
        "http://data.metromobilite.fr/api/routers/default/index/routes")
    lines = {route["id"].replace(":", "_"): Line(route)
             for route in json.loads(body)}
Exemplo n.º 24
0
    tree.write("stations.xml",
               pretty_print=True,
               encoding="utf-8",
               xml_declaration=True)


def stations():
    """Return the module-level _stations dict built by the loading script."""
    return _stations

