Example No. 1
 def get_openflow_topology(self):
     urlpath = ODL_OPER_URL + "/" + ODL_TOPO_URL
     print("get_openlow_topology: url_path--- " + urlpath)
     topoStr = self._get_resource(urlpath, None)
     topology_list = jsonutils.loads(
         topoStr)["network-topology"]["topology"]
     return topology_list[0]
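In these snippets jsonutils is presumably oslo.serialization's jsonutils, a thin wrapper around the standard json module. A minimal sketch of the parsing step in isolation, with a hard-coded body standing in for the ODL RESTCONF response:

from oslo_serialization import jsonutils

# Hypothetical response body shaped like the RESTCONF network-topology document.
topo_str = '{"network-topology": {"topology": [{"topology-id": "flow:1", "node": []}]}}'

topology_list = jsonutils.loads(topo_str)["network-topology"]["topology"]
print(topology_list[0]["topology-id"])  # prints: flow:1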
Example No. 2
 def __init__(self):
     f = open('mapservice.config.json')
     d = jsonutils.loads(f.read())
     cfg = d[d['default']]
     print 'mapservice config:', cfg
     self.baseUrl = cfg['mapservice_base_url']
     self.apikey = cfg['api_key']
     self.apisecret = cfg['api_secret']
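The constructor above assumes mapservice.config.json is a JSON document whose 'default' key names the profile to load. A hedged sketch of the shape it expects; the profile name and values here are hypothetical:

from oslo_serialization import jsonutils

sample = """{
    "default": "dev",
    "dev": {
        "mapservice_base_url": "http://example.invalid/map",
        "api_key": "KEY",
        "api_secret": "SECRET"
    }
}"""
d = jsonutils.loads(sample)
# Same lookup as the constructor: pick the profile named by 'default'.
cfg = d[d['default']]
print(cfg['mapservice_base_url'])  # prints: http://example.invalid/map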
Example No. 3
def mergeTollCases():
    cases_path = os.path.normpath('cases')
    print 'cases_path:', cases_path
    tollcases = [
        x for x in os.listdir(cases_path) if x.startswith('tollcase_')
    ]
    # for (dirpath, dirnames, filenames) in os.walk(cases_path):
    #     pass
    # print 'tollcases:', tollcases
    # print 'len:', len(tollcases)

    ods = []
    n = 0
    dups = 0
    for case in tollcases:
        fp = os.path.join(cases_path, case)
        # print
        # print fp
        # print
        f = open(fp)
        c = f.read()
        # print c
        lns = c.splitlines()
        assert (2 == len(lns))
        toll_info = lns[0]
        odpairs = lns[1]
        # print 'toll_info:', toll_info
        # print
        # print 'odpairs:', odpairs

        # parse json -> list
        odpairs = jsonutils.loads(odpairs)
        n += len(odpairs)
        # print 'odpairs:', odpairs
        for p in odpairs:
            # # dict is not hashable, we use json representation as key
            # k = jsonutils.dumps(p)
            # # print 'pair:', k
            # if k in odpairset:
            #     dups += 1
            # odpairset.add(k)
            if p in ods:
                dups += 1
                continue
            ods.append(p)
        # exit(0)
    # print 'n:', n
    # print 'dups:', dups
    print 'len(ods):', len(ods)
    # exit(0)
    return ods
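mergeTollCases expects every cases/tollcase_* file to hold exactly two lines: a toll-info line and a JSON array of origin/destination pairs. A minimal sketch that writes one such file, assuming only the format implied by the parsing loop above; the pair fields are hypothetical:

import os

from oslo_serialization import jsonutils

if not os.path.isdir('cases'):
    os.makedirs('cases')
odpairs = [{"orig": {"lat": 1.0, "lon": 2.0}, "dest": {"lat": 3.0, "lon": 4.0}}]
with open(os.path.join('cases', 'tollcase_demo'), 'w') as f:
    f.write('toll info placeholder\n')        # line 1: toll_info
    f.write(jsonutils.dumps(odpairs) + '\n')  # line 2: JSON list of OD pairs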
Example No. 4
def parse_args():
    ''' parsing command line arguments
    @return {
        'password':'******',
        'user':'******',
        'host':'host',
        'database':'database',
        'command':'command'
    }
    '''
    parser = argparse.ArgumentParser(
        description='postgre sql command executor',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='''
        e.g. $ python postgre.utils.py -c "SELECT COUNT(*) FROM relations r, relation_members rm WHERE tags->'place' = 'island' AND r.id = rm.relation_id AND member_type = 'W' AND sequence_id = 0;"
        ''')
    parser.add_argument('-c', '--command', help='sql command', default='')
    parser.add_argument('-l', help='list database', action='store_true')
    parser.add_argument('-p',
                        '--password',
                        help='password',
                        default='postgres')
    parser.add_argument('-U', '--user', help='user', default='postgres')
    parser.add_argument('-s', '--settings', help='settings', default='default')
    args = parser.parse_args()

    #print args
    if args.l:
        args.command = '-l'

    if 0 == len(args.command):
        print 'empty command, nothing to do'
        parser.print_help()
        exit(0)

    f = open('postgre.config.json')
    s = f.read()
    d = jsonutils.loads(s)
    cfg = d[args.settings]
    return {
        'password': args.password,
        'user': args.user,
        'host': cfg['host'],
        'database': cfg['database'],
        'command': args.command,
    }
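A hedged usage sketch of the dict parse_args returns; feeding it to psql is only one plausible consumer, and PGPASSWORD is used because psql reads the password from the environment rather than from a flag:

import os
import subprocess

if __name__ == '__main__':
    cfg = parse_args()
    env = dict(os.environ, PGPASSWORD=cfg['password'])
    subprocess.call(['psql', '-h', cfg['host'], '-U', cfg['user'],
                     '-d', cfg['database'], '-c', cfg['command']], env=env)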
Example No. 5
 def __init__(self, raw):
     ''' constructor
     @param raw: raw response
     '''
     super(DirectionsResponse, self).__init__(raw)
     d = jsonutils.loads(raw)
     if 'status' in d and 'status' in d['status'] and 11200 == d['status'][
             'status']:
         self.status = True
     if 'route' in d and len(d['route']) >= 1:
         # pick the first route
         droute = d['route'][0]
         if 'route_info' in droute:
             dRI = droute['route_info']
             if 'travel_dist_in_meter' in dRI:
                 self.distance = dRI['travel_dist_in_meter']
             if 'travel_time_in_second' in dRI:
                 self.travel_time = dRI['travel_time_in_second']
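The constructor above only inspects a few fields. A hedged sketch of a raw payload that would populate status, distance and travel_time; the shape is inferred from the code, not from any documented API, and the numbers are made up:

from oslo_serialization import jsonutils

raw = jsonutils.dumps({
    "status": {"status": 11200},
    "route": [{
        "route_info": {
            "travel_dist_in_meter": 12345,
            "travel_time_in_second": 678,
        },
    }],
})
resp = DirectionsResponse(raw)  # assumes the class above is importable
print(resp.distance, resp.travel_time)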
Example No. 6
 def __init__(self, raw):
     ''' constructor
     @param raw: raw response
     '''
     super(GoogleDirectionsResponse, self).__init__(raw)
     d = jsonutils.loads(raw)
     if 'status' in d and 'OK' == d['status']:
         self.status = True
     if 'routes' in d and len(d['routes']) > 0:
         droute = d['routes'][0]
         if 'legs' in droute:
             if len(droute['legs']) > 1:
                 print 'WARNING: you may add waypoints to get direction. not implemented YET'
                 exit(0)
             dleg = droute['legs'][0]
             if 'distance' in dleg and 'value' in dleg['distance']:
                 self.distance = dleg['distance']['value']
             if 'duration' in dleg and 'value' in dleg['duration']:
                 self.travel_time = dleg['duration']['value']
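The same idea for the Google variant: a hedged sketch of the minimal response it reads, one route with a single leg carrying distance/duration values; the numbers are made up:

from oslo_serialization import jsonutils

raw = jsonutils.dumps({
    "status": "OK",
    "routes": [{
        "legs": [{
            "distance": {"value": 15000},
            "duration": {"value": 1200},
        }],
    }],
})
resp = GoogleDirectionsResponse(raw)  # assumes the class above is importable
print(resp.distance, resp.travel_time)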
Example No. 7
    def getNearbyCities(self, lat, lng, extension=1.0, limit=sys.maxint):
        ''' get adjacent cities near (lat, lng)
        @param lat : latitude of the anchor point
        @param lng : longitude of the anchor point
        @param extension : constraint on how far to extend the search box
        @param limit : constraint on the number of candidates in the return value
        @return: cities. [{
                'id' : city['id'],
                'lat' : city['lat'],
                'lon' : city['lon'],
                'name' : name,
                'population' : getPopulation(city)
            }]
        '''
        key = self.getCacheKey({
            'lat': lat,
            'lng': lng,
            'extension': extension,
            'limit': limit
        })
        city_centers = cacheutils.retrieve(key, 'tmp/place')
        if city_centers:
            return jsonutils.loads(city_centers)
        # left top part
        (ltLat, ltLon) = self.normalizeLatLon(lat - extension, lng - extension)
        ltCities = self.getCitiesInBoundary(ltLat,
                                            ltLon,
                                            lat,
                                            lng,
                                            limit,
                                            anchor={
                                                'lat': lat,
                                                'lng': lng
                                            })
        tmpExtension = extension
        while len(ltCities) == 0:
            print 'expand extension on left top'
            tmpExtension *= 2
            (ltLat1, ltLon1) = self.normalizeLatLon(lat - tmpExtension,
                                                    lng - tmpExtension)
            if ltLat1 == ltLat and ltLon1 == ltLon:
                print 'WARNING: cannot find nearby cities around (%s, %s) in the left top part' % (lat, lng)
                break
            ltLat = ltLat1
            ltLon = ltLon1
            ltCities = self.getCitiesInBoundary(ltLat,
                                                ltLon,
                                                lat,
                                                lng,
                                                limit,
                                                anchor={
                                                    'lat': lat,
                                                    'lng': lng
                                                })

        # right top part
        (rtLat, rtLon) = self.normalizeLatLon(lat - extension, lng + extension)
        rtCities = self.getCitiesInBoundary(rtLat,
                                            lng,
                                            lat,
                                            rtLon,
                                            limit,
                                            anchor={
                                                'lat': lat,
                                                'lng': lng
                                            })
        tmpExtension = extension
        while len(rtCities) == 0:
            print 'expand extension on right top'
            tmpExtension *= 2
            (rtLat1, rtLon1) = self.normalizeLatLon(lat - tmpExtension,
                                                    lng + tmpExtension)
            if rtLat1 == rtLat and rtLon1 == rtLon:
                print 'WARNING: cannot find nearby cities around (%s, %s) in the right top part' % (lat, lng)
                break
            rtLat = rtLat1
            rtLon = rtLon1
            rtCities = self.getCitiesInBoundary(rtLat,
                                                lng,
                                                lat,
                                                rtLon,
                                                limit,
                                                anchor={
                                                    'lat': lat,
                                                    'lng': lng
                                                })

        # left bottom part
        (lbLat, lbLon) = self.normalizeLatLon(lat + extension, lng - extension)
        lbCities = self.getCitiesInBoundary(lat,
                                            lbLon,
                                            lbLat,
                                            lng,
                                            limit,
                                            anchor={
                                                'lat': lat,
                                                'lng': lng
                                            })
        tmpExtension = extension
        while len(lbCities) == 0:
            print 'expand extension on left bottom'
            tmpExtension *= 2
            (lbLat1, lbLon1) = self.normalizeLatLon(lat + tmpExtension,
                                                    lng - tmpExtension)
            if lbLat1 == lbLat and lbLon1 == lbLon:
                print 'WARNING: cannot find nearby cities around (%s, %s) in the left bottom part' % (lat, lng)
                break
            lbLat = lbLat1
            lbLon = lbLon1
            lbCities = self.getCitiesInBoundary(lat,
                                                lbLon,
                                                lbLat,
                                                lng,
                                                limit,
                                                anchor={
                                                    'lat': lat,
                                                    'lng': lng
                                                })

        # right bottom
        (rbLat, rbLon) = self.normalizeLatLon(lat + extension, lng + extension)
        rbCities = self.getCitiesInBoundary(lat,
                                            lng,
                                            rbLat,
                                            rbLon,
                                            limit,
                                            anchor={
                                                'lat': lat,
                                                'lng': lng
                                            })
        tmpExtension = extension
        while len(rbCities) == 0:
            print 'expand extension on right bottom'
            tmpExtension *= 2
            (rbLat1, rbLon1) = self.normalizeLatLon(lat + tmpExtension,
                                                    lng + tmpExtension)
            if rbLat1 == rbLat and rbLon1 == rbLon:
                print 'WARNING: cannot find nearby cities around (%s, %s) in the right bottom part' % (lat, lng)
                break
            rbLat = rbLat1
            rbLon = rbLon1
            rbCities = self.getCitiesInBoundary(lat,
                                                lng,
                                                rbLat,
                                                rbLon,
                                                limit,
                                                anchor={
                                                    'lat': lat,
                                                    'lng': lng
                                                })

        city_centers = []
        # add at least one candidate in each region
        print 'left top cities: ', ltCities
        print 'right top cities: ', rtCities
        print 'left bottom cities: ', lbCities
        print 'right bottom cities: ', rbCities
        self.moveTop(city_centers, ltCities)
        self.moveTop(city_centers, rtCities)
        self.moveTop(city_centers, lbCities)
        self.moveTop(city_centers, rbCities)
        # print '4 cities :', city_centers
        while len(city_centers) < limit:
            nextCity = None
            if len(ltCities) > 0:
                nextCity = ltCities[0]
            if len(rtCities) > 0:
                if nextCity is None:
                    nextCity = rtCities[0]
                else:
                    if nextCity['population'] < rtCities[0]['population']:
                        nextCity = rtCities[0]
            if len(lbCities) > 0:
                if nextCity is None:
                    nextCity = lbCities[0]
                else:
                    if nextCity['population'] < lbCities[0]['population']:
                        nextCity = lbCities[0]
            if len(rbCities) > 0:
                if nextCity is None:
                    nextCity = rbCities[0]
                else:
                    if nextCity['population'] < rbCities[0]['population']:
                        nextCity = rbCities[0]
            if nextCity is None:
                break
            city_centers.append(nextCity)
            if nextCity in ltCities:
                ltCities.remove(nextCity)
            if nextCity in rtCities:
                rtCities.remove(nextCity)
            if nextCity in lbCities:
                lbCities.remove(nextCity)
            if nextCity in rbCities:
                rbCities.remove(nextCity)
        print '\nfind city centers:', city_centers

        #dumpAdjacentCitiesKML(wayid, latlon, city_centers)
        cacheutils.store(key, 'tmp/place', jsonutils.dumps(city_centers))
        return city_centers
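A hypothetical usage sketch, assuming `place` is an instance of the class that defines getNearbyCities; it asks for a handful of cities around an anchor point and prints the fields the docstring promises:

nearby = place.getNearbyCities(32.0, -116.0, extension=1.0, limit=8)
for city in nearby:
    print(city['name'], city['population'])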
Example No. 8
    def getCitiesInBoundary(self, lat0, lon0, lat1, lon1, limit, anchor):
        ''' get cities in boundary [lat0,lon0->lat1,lon1]
        if more than limit cities are found, randomly pick limit of them
        @param lat0: left bottom y position in latitude
        @param lon0: left bottom x position in longitude
        @param lat1: right top y position in latitude
        @param lon1: right top x position in longitude
        @param limit: max number of cities in this boundary
        @param anchor: anchor point. {
            'lat' : lat,
            'lng' : lng
        }
        @return: list of cities. [{
                'id' : city['id'],
                'lat' : city['lat'],
                'lon' : city['lon'],
                'name' : name,
                'population' : getPopulation(city)
            }]
        '''
        # 0, 1, 1, 2, 3, 5, 8, 13, 21, 34
        time.sleep(5)
        # http://overpass-api.de/api/interpreter?data=[out:json];node["place"](31.59,-116.68,32.59,-115.68);out;
        # in overpass-api, lat0 must < lat1
        if lat0 > lat1:
            tmp = lat0
            lat0 = lat1
            lat1 = tmp
        query = '[out:json];node["place"](%.2f,%.2f,%.2f,%.2f);out;' % (
            lat0, lon0, lat1, lon1)
        query = urllib.quote(query)
        cmd = 'curl %s%s' % ('http://overpass-api.de/api/interpreter?data=',
                             query)

        output = curlutils.cachedQuery(cmd, 'tmp/overpass')
        if 'Please check /api/status for the quota of your IP address.' in output or 'The server is probably too busy to handle your request.' in output:
            curlutils.removeCachedQuery(cmd, 'tmp/overpass')
            print 'out of limitation. please wait...'
            exit(0)

        d = jsonutils.loads(output)
        cities = d['elements']

        # get population
        getPopulation = lambda city: city['tags'].get('population', 0)
        for city in cities:
            population = 0
            try:
                population = str(getPopulation(city)).replace(',', '').replace(
                    '~', '').replace('abt', '').replace(' ', '')
                # fix the case : 23209 (INEGI 2010)
                population = re.sub(r'\(.*\)$', '', population)
                population = re.sub(r'people in \d{4}', '', population)
                population = int(population)
            except ValueError:
                strPopulation = getPopulation(city)
                print 'ERROR: parsing population error: %s' % strPopulation
                population = 0
            city['tags']['population'] = population
        print cities

        print 'anchor:', anchor
        getManhattanDistance = lambda city: (
            abs(city['lat'] - anchor['lat']) + abs(city['lon'] - anchor['lng']))
        # cities = sorted(cities, key=getPopulation, reverse=True)
        cities = sorted(cities, key=getManhattanDistance)
        print cities
        # TODO, random select at most limit cities
        # if len(cities) > limit:
        #     cities = random.sample(cities, limit)
        #     print
        #     print 'pick:', cities

        # exit(0)
        city_centers = []
        n = 0
        for city in cities:
            n += 1
            if n > limit:
                break
            name = city['tags'].get('name', 'unknown')
            city_centers.append({
                'id': city['id'],
                'lat': city['lat'],
                'lon': city['lon'],
                'name': name,
                'population': getPopulation(city)
            })
        # print city_centers
        return city_centers
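A minimal sketch of the Overpass request the method above assembles, using urllib.parse directly instead of the curl/caching helpers (the Python 3 spelling of the Python 2 urllib.quote call in the original):

from urllib.parse import quote

lat0, lon0, lat1, lon1 = 31.59, -116.68, 32.59, -115.68
query = '[out:json];node["place"](%.2f,%.2f,%.2f,%.2f);out;' % (lat0, lon0, lat1, lon1)
print('http://overpass-api.de/api/interpreter?data=' + quote(query))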
Example No. 9
def main():
    # Find everything in etcd that we might expect to see
    vpp_ports = {}
    uplink_ports = {}
    physnet_ports = {}
    unknown_ports = []
    for f in vppconn.get_interfaces():
        # Find downlink ports
        port_id = decode_port_tag(f['tag'])
        if port_id is not None:
            vpp_ports[port_id] = f
        else:
            uplink_tag = decode_uplink_tag(f['tag'])
            if uplink_tag is not None:
                uplink_ports[uplink_tag] = f
            else:
                physnet = decode_physnet_if_tag(f['tag'])
                if physnet is not None:
                    physnet_ports[physnet] = f
                else:
                    unknown_ports.append(f)

    for f in unknown_ports:
        print('INFO: Unknown port: %s (%d)' % (f['name'], f['sw_if_idx']))

    # Physnets want checking against the ML2 config
    for physnet, f in physnet_ports.items():
        print('INFO: Physnet %s is on port %s (%d)' %
              (physnet, f['name'], f['sw_if_idx']))

    # Confirm only the ports we expect to find are in etcd
    port_dir_in_etcd = '/networking-vpp/nodes/%s/ports' \
        % (binding_host)
    port_keypatt = re.compile(r'^/networking-vpp/nodes/([^/]+)/ports/([^/]+)$')
    result = etcd_client.read(port_dir_in_etcd, recursive=True)

    etcd_ports = {}
    for val in result.children:
        k = val.key
        res = port_keypatt.match(k)
        if res:
            host = res.group(1)
            if host != binding_host:
                continue
            uuid = res.group(2)
            print('INFO: port %s found in etcd' % uuid)
            etcd_ports[uuid] = jsonutils.loads(val.value)

    # Try and find the intersection, which should correspond, and the
    # differences, which shouldn't exist

    etcd_portset = frozenset(etcd_ports.keys())
    vpp_portset = frozenset(vpp_ports.keys())

    unexpected_ports = vpp_portset - etcd_portset
    unbound_ports = etcd_portset - vpp_portset
    ports_to_check = vpp_portset & etcd_portset

    if unexpected_ports:
        print('ERROR: unexpectedly bound ports in VPP: %s' %
              ', '.join(unexpected_ports))

    if unbound_ports:
        print('ERROR: unbound ports in etcd: %s' % ', '.join(unbound_ports))

    for f in ports_to_check:
        # etcd_value = etcd_ports[f]
        # vpp_port = vpp_ports[f]

        # Is this port bound to the VM correctly?
        pass

    # "binding_type": "tap"

    # "mtu": 1500
    # "mac_address": "fa:16:3e:2e:1d:8d"
    # "port_security_enabled": false

    # "security_groups": []}

    # "allowed_address_pairs": []
    # "fixed_ips": [{"subnet_id": "da52a3aa-a899-46c9-992e-b2b4294ce9ce"
    # "ip_address": "10.0.0.2"}
    # {"subnet_id": "51d259ab-8b6f-4a77-8375-8b364a497ab8"
    # "ip_address": "fd20:1bce:4726:0:f816:3eff:fe2e:1d8d"}]

    # Is this port in the correct network?

    etcd_networks = set()
    for name, val in etcd_ports.items():
        # Take note of the network for future checks - networks in etcd
        # are implied by their ports.
        etcd_networks.add((
            val["physnet"],
            val["network_type"],
            val["segmentation_id"],
        ))
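A minimal sketch of how the port_keypatt regex above decomposes an etcd key into a host and a port UUID; the key used here is hypothetical:

import re

port_keypatt = re.compile(r'^/networking-vpp/nodes/([^/]+)/ports/([^/]+)$')
m = port_keypatt.match(
    '/networking-vpp/nodes/compute1/ports/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')
if m:
    print(m.group(1))  # host: compute1
    print(m.group(2))  # port UUID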
Example No. 10
 def get_openflow_topology(self):
     urlpath = ODL_OPER_URL + "/" + ODL_TOPO_URL
     print("get_openflow_topology: url_path--- " + urlpath)
     topoStr = self._get_resource(urlpath, None)
     topology_list = jsonutils.loads(topoStr)["network-topology"]["topology"]
     return topology_list[0]
Example No. 11
 def list_openflow_nodes(self):
     urlpath = ODL_OPER_URL + "opendaylight-inventory:nodes/"
     print("list_openflow_nodes: url_path--- " + urlpath)
     openflowNodesStr = self._get_resource(urlpath, None)
     node_list = jsonutils.loads(openflowNodesStr)["nodes"]["node"]
     return node_list
Example No. 12
def main():
    # Find everything in etcd that we might expect to see
    vpp_ports = {}
    uplink_ports = {}
    physnet_ports = {}
    unknown_ports = []
    for f in vpp.get_interfaces():
        # Find downlink ports
        port_id = decode_port_tag(f['tag'])
        if port_id is not None:
            vpp_ports[port_id] = f
        else:
            uplink_tag = decode_uplink_tag(f['tag'])
            if uplink_tag is not None:
                uplink_ports[uplink_tag] = f
            else:
                physnet = decode_physnet_if_tag(f['tag'])
                if physnet is not None:
                    physnet_ports[physnet] = f
                else:
                    unknown_ports.append(f)

    for f in unknown_ports:
        print('INFO: Unknown port: %s (%d)' % (f['name'], f['sw_if_idx']))

    # Physnets want checking against the ML2 config
    for physnet, f in physnet_ports.items():
        print('INFO: Physnet %s is on port %s (%d)' %
              (physnet, f['name'], f['sw_if_idx']))

    # Confirm only the ports we expect to find are in etcd
    port_dir_in_etcd = '/networking-vpp/nodes/%s/ports' \
        % (binding_host)
    port_keypatt = re.compile(r'^/networking-vpp/nodes/([^/]+)/ports/([^/]+)$')
    result = etcd_client.read(port_dir_in_etcd, recursive=True)

    etcd_ports = {}
    for val in result.children:
        k = val.key
        res = port_keypatt.match(k)
        if res:
            host = res.group(1)
            if host != binding_host:
                continue
            uuid = res.group(2)
            print('INFO: port %s found in etcd' % uuid)
            etcd_ports[uuid] = jsonutils.loads(val.value)

    # Try and find the intersection, which should correspond, and the
    # differences, which shouldn't exist

    etcd_portset = frozenset(etcd_ports.keys())
    vpp_portset = frozenset(vpp_ports.keys())

    unexpected_ports = vpp_portset - etcd_portset
    unbound_ports = etcd_portset - vpp_portset
    ports_to_check = vpp_portset & etcd_portset

    if unexpected_ports:
        print('ERROR: unexpectedly bound ports in VPP: %s' %
              ', '.join(unexpected_ports))

    if unbound_ports:
        print('ERROR: unbound ports in etcd: %s' %
              ', '.join(unbound_ports))

    for f in ports_to_check:
        # etcd_value = etcd_ports[f]
        # vpp_port = vpp_ports[f]

        # Is this port bound to the VM correctly?
        pass

    # "binding_type": "tap"

    # "mtu": 1500
    # "mac_address": "fa:16:3e:2e:1d:8d"
    # "port_security_enabled": false

    # "security_groups": []}

    # "allowed_address_pairs": []
    # "fixed_ips": [{"subnet_id": "da52a3aa-a899-46c9-992e-b2b4294ce9ce"
    # "ip_address": "10.0.0.2"}
    # {"subnet_id": "51d259ab-8b6f-4a77-8375-8b364a497ab8"
    # "ip_address": "fd20:1bce:4726:0:f816:3eff:fe2e:1d8d"}]

    # Is this port in the correct network?

    etcd_networks = set()
    for name, val in etcd_ports.items():
        # Take note of the network for future checks - networks in etcd
        # are implied by their ports.
        etcd_networks.add((val["physnet"], val["network_type"],
                           val["segmentation_id"],))
Example No. 13
    def getODPairsForTollCase(self,
                              cities_start,
                              cities_end,
                              tollcase,
                              limit=10,
                              useCache=True):
        ''' get origin->destination pairs passing toll ways in tollcase['ids']
        @param cities_start: cities list near start point
        @param cities_end: cities list near end point
        @param tollcase: {
            'start_point' : {
                'lat' : lat,
                'lng' : lng
            },
            'end_point' : {
                'lat' : lat,
                'lng' : lng
            },
            'ids' : [
                wayid0, wayid1, ...
            ]
        }
        @param limit: constraint of orig->dest pair count in return
        @param useCache: if True, try to get result from cache
        cities is list of {
                'id' : id,
                'lat' : lat,
                'lon' : lon,
                'name' : name,
                'population' : population
            }
        @return: { 'orig2dests': pairs }
        orig2dests is list of orig->dest pair. [{
                'orig' : {
                    'name':city['name'],
                    'lat':city['lat'],
                    'lon':city['lon']
                },
                'dest' : {
                    'name':c2['name'],
                    'lat':c2['lat'],
                    'lon':c2['lon']
                }
            }]
        '''
        # print 'tollcase:', tollcase
        key = self.getTollCaseCacheKey(tollcase)
        orig2dests = cacheutils.retrieve(key, 'cases', 'tollcase')

        print 'len(orig2dests):', 0 if orig2dests is None else len(orig2dests)
        if orig2dests:
            cases_good = jsonutils.loads(orig2dests)
            if len(cases_good) > 0:
                return cases_good
            elif g_skip_empty_case:
                # print 'g_skip_empty_case is True, return'
                return cases_good

        ms = mapservice.MapService()
        cases_good = []
        cases_cannot_avoid_toll = []
        cases_do_not_through_toll = []
        cases_failed = []

        # set for wayids in this test case
        cur_tollset = set(tollcase['ids'])
        #print
        #print 'tollset:', cur_tollset
        #print 'len(s):', len(cur_tollset), '\tlen(l):', len(tollcase['ids'])
        # all toll wayids in final origin->destination pairs
        passingset = set()
        # all common toll wayids in final origin->destination and cur_tollset
        commonset = set()
        for city in cities_start:
            # print '\tname', city['name']
            # print '\tlat,lon = {%f, %f}' % (city['lat'], city['lon'])
            for c2 in cities_end:
                if len(cases_good) >= limit:
                    break
                if city != c2:
                    print '\t', city['name'], '->', c2['name']
                    print '\t{%f,%f}->{%f,%f}' % (city['lat'], city['lon'],
                                                  c2['lat'], c2['lon'])
                    orig = LatLon(city['lat'], city['lon'])
                    dest = LatLon(c2['lat'], c2['lon'])
                    output = ms.directions(orig, dest)
                    # print 'output:', output
                    pair = {
                        'orig': {
                            'name': city['name'],
                            'lat': city['lat'],
                            'lon': city['lon']
                        },
                        'dest': {
                            'name': c2['name'],
                            'lat': c2['lat'],
                            'lon': c2['lon']
                        }
                    }
                    if 'java.net.SocketTimeoutException: Read timed out' in output:
                        print '\tFAILED: {%f,%f}->{%f,%f}' % (
                            city['lat'], city['lon'], c2['lat'], c2['lon'])
                        cases_failed.append(
                            '\tFAILED: {%f,%f}->{%f,%f}' %
                            (city['lat'], city['lon'], c2['lat'], c2['lon']))
                        continue
                    # passing_wayids = maputils.edgesInRouteResponse(tollcase['ids'], output)
                    passing_wayids = maputils.edgesInRouteResponse(
                        g_tollset, output)
                    tmpset = set(passing_wayids)
                    tmpcommonset = tmpset & cur_tollset
                    if 0 < len(tmpcommonset):
                        print 'passing wayids: ', passing_wayids
                        commonset |= tmpcommonset
                        if 0 == len(tmpset - cur_tollset):
                            # TODO, zexings, modify edges in route response to collect all the toll edges
                            print 'all toll ways are in testcase'
                        else:
                            print 'WARNING. not all toll ways in tollcase'
                            print 'common:', tmpcommonset
                            print 'uncommon:', tmpset - cur_tollset
                            # exit(0)
                        print '\tOK: {%f,%f}->{%f,%f}' % (
                            city['lat'], city['lon'], c2['lat'], c2['lon'])
                        is_new_case = True

                        for goodcase in cases_good[:]:
                            tmp2set = set(goodcase['ids'])
                            # print 'len(tmp2set):', len(tmp2set)
                            if len(tmpset - tmp2set) == 0:
                                # all toll edges can be found in a previous origin -> destination case, skip it
                                # print 'all toll edges can be found in a previous origin -> destination case, skip it'
                                is_new_case = False
                                break
                            elif len(tmp2set - tmpset) == 0:
                                # all toll edges can be found in the current origin -> destination case, remove the old one
                                # print 'all toll edges can be found in the current origin -> destination case, remove the old one'
                                cases_good.remove(goodcase)
                        # print 'len(tmpset):', len(tmpset)
                        # print 'is_new_case:', is_new_case
                        # print 'len(cases_good):', len(cases_good)
                        # exit(0)
                        if not is_new_case:
                            continue
                        pair['ids'] = passing_wayids
                        passingset |= tmpset
                        cases_good.append(pair)
                        # exit(0)
                        # TODO, zexings
                        # output2 = ms.directions(orig, dest, True)
                        # if 'java.net.SocketTimeoutException: Read timed out' in output2:
                        #     print '\tFAILED: {%f,%f}-notoll->{%f,%f}' % (city['lat'], city['lon'], c2['lat'], c2['lon'])
                        #     cases_failed.append('\tFAILED: {%f,%f}<-{%f,%f}' % (city['lat'], city['lon'], c2['lat'], c2['lon']))
                        #     continue
                        #
                        # if not maputils.edgesInRouteResponse(wayid, output2):
                        #     print '\tGOOD: {%f,%f}-notoll->{%f,%f}' % (city['lat'], city['lon'], c2['lat'], c2['lon'])
                        #     # TODO, generate kml for these two paths
                        #     cases_good.append(pair)
                        # else:
                        #     print '\tNoAvoid: {%f,%f}-notoll->{%f,%f}' % (city['lat'], city['lon'], c2['lat'], c2['lon'])
                        #     cases_cannot_avoid_toll.append(pair)
                        #     # cannot avoid toll edge
                        #     print 'cannot avoid toll edge'
                    else:
                        print '\tNoVia: {%f,%f}->{%f,%f}' % (
                            city['lat'], city['lon'], c2['lat'], c2['lon'])
                        cases_do_not_through_toll.append(pair)
        print 'good case:', cases_good
        print 'cannot avoid toll cases:', cases_cannot_avoid_toll
        print 'no toll cases:', cases_do_not_through_toll
        print 'failed cases:', cases_failed
        print 'coverage:', len(commonset), '/', len(cur_tollset), '=', (
            len(commonset) * 1.0 / len(cur_tollset))
        d = {'tollcase': tollcase, 'orig2dests': cases_good}
        # dump result
        # if len(cases_good) > 0:
        #     cacheutils.store(key, 'cases', jsonutils.dumps(cases_good), 'tollcase')
        # store result to avoid duplicate calculation
        cacheutils.store(key, 'cases', jsonutils.dumps(cases_good), 'tollcase')
        return cases_good
Example No. 14
    # print 'production code : %s' % args.code
    print 'args:', args
    # exit(0)

    # 1. get all toll edges (edgeid, lat, lon)
    # if args.tollcases:
    #     cases = getUnfinishedTollEdges(args.tollcases)
    # else:
    #     cases = getTollEdges(cfg['toll_edge_cases_url'])
    f = open('tolls.merge.log')
    cases = []
    g_tollset = set()
    for ln in f.readlines():
        # print ln
        try:
            tc = jsonutils.loads(ln)
            cases.append(tc)
            g_tollset |= set(tc['ids'])
        except ValueError as e:
            continue
    fc = lambda tc: int(tc['fc'])
    cases = sorted(cases, key=fc, reverse=True)
    # print cases
    print 'case length:', len(cases)
    print 'tollset.len:', len(g_tollset)
    time.sleep(3)
    # exit(0)

    # 2. get all adjacent cities around each toll
    # case_cities = getAllAdjacentCities(cases, cfg['extension'], cfg['max'])
    # print
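The loop above treats tolls.merge.log as one JSON object per line, skipping lines that fail to parse. A hedged sketch of a line it would accept, with hypothetical values for the 'ids' and 'fc' fields it reads:

from oslo_serialization import jsonutils

line = '{"ids": [100, 101, 102], "fc": "3"}'
tc = jsonutils.loads(line)
print(int(tc['fc']), len(tc['ids']))  # prints: 3 3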
Example No. 15
 def list_openflow_nodes(self):
     urlpath = ODL_OPER_URL + "opendaylight-inventory:nodes/"
     print("list_openflow_nodes: url_path--- " + urlpath)
     openflowNodesStr = self._get_resource(urlpath, None)
     node_list = jsonutils.loads(openflowNodesStr)["nodes"]["node"]
     return node_list
Example No. 16
 def _from_json(self, datastring):
     try:
         return jsonutils.loads(datastring)
     except ValueError:
         msg = _("Cannot understand JSON")
         raise exception.MalformedRequestBody(reason=msg)
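A standalone sketch of the same guard, assuming only oslo.serialization; a plain ValueError stands in for the project-specific exception.MalformedRequestBody:

from oslo_serialization import jsonutils

def from_json(datastring):
    try:
        return jsonutils.loads(datastring)
    except ValueError:
        raise ValueError("Cannot understand JSON")

print(from_json('{"server": {"name": "demo"}}'))  # parses to a dict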