import json
import time

import valhalla

#create_query (not shown here) builds the trace_attributes request payload
def make_valhalla_mapmatch_query(coords):

    start = time.time()

    #serialize the request and hand it to a configured actor
    query = json.dumps(create_query(coords))
    valhalla.Configure('./valhalla.json')
    actor = valhalla.Actor()
    response = actor.TraceAttributes(query)
    matched = json.loads(response)

    print("Time taken for Map Matching : %.3f" % (time.time() - start))

    return matched
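
A minimal usage sketch; create_query is not shown above, so the (lon, lat) input shape and the trace_attributes payload it builds are assumptions, and a valhalla.json is expected in the working directory:

#hypothetical input; create_query is assumed to wrap these points
#into a trace_attributes request
coords = [(5.03231, 52.08813), (5.03541, 52.08913)]
matched = make_valhalla_mapmatch_query(coords)
print(matched.get('shape'))  #encoded polyline of the matched path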
Example #2
    def __init__(self, tile_dir, outfile):

        self.tile_dir = tile_dir
        if not os.path.exists(self.tile_dir):
            print("Data directory " + self.tile_dir + " does not exist.")
            sys.exit(1)

        #get the config and set up for it
        print("Building tile config...")

        config_file = "/tmp/valhalla_config.json"

        os.system("valhalla_build_config --mjolnir-tile-dir " + self.tile_dir +
                  "valhalla_tiles --mjolnir-tile-extract " + self.tile_dir +
                  "tiles.tar --mjolnir-timezone " + self.tile_dir +
                  "valhalla_tiles/timezones.sqlite --mjolnir-admin " +
                  self.tile_dir + "valhalla_tiles/admins.sqlite > " +
                  config_file)
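        #the generated config points mjolnir at <tile_dir>valhalla_tiles for
        #loose tiles, <tile_dir>tiles.tar for the tile extract, and the
        #timezone/admin sqlite databases that live alongside the tiles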

        try:
            with open(config_file) as f:
                conf = json.load(f)  #sanity check that the generated config parses
                valhalla.Configure(config_file)
                os.environ['REDIS_HOST']  #raises KeyError if the variable is unset

        except Exception as e:
            sys.stderr.write('Problem with config file: {0}\n'.format(e))
            sys.exit(1)

        self.point_queue = Queue(maxsize=1000)
        self.processed_queue = Queue(maxsize=1000)
        self.vehicles = {}

        #per-thread storage, e.g. so each worker can hold its own matcher state
        self.thread_local = threading.local()

        # setup output thread
        self.outfile = open(outfile, 'w')

        t_output = threading.Thread(target=self.__output_matches)
        t_output.daemon = True
        t_output.start()

        # setup the processor
        pool_size = int(os.environ.get('THREAD_POOL_MULTIPLIER',
                                       1)) * multiprocessing.cpu_count()
        print "Starting validation processor with " + str(
            pool_size) + " threads"
        for x in range(pool_size):
            t = threading.Thread(target=self.__start_thread)
            t.setDaemon(1)
            t.start()
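
A minimal construction sketch; the excerpt does not show the enclosing class name, so Matcher below is a hypothetical stand-in, and tile_dir must end with a separator as noted above:

#hypothetical class name; only __init__ is shown in the excerpt
matcher = Matcher('/data/valhalla/', '/tmp/matches.csv')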
Example #3
# -*- coding: utf-8 -*-

import sys
import os
import valhalla
import json

#use the config passed on the command line or fall back to the
#valhalla.json that sits next to this script
valhalla.Configure(sys.argv[1] if len(sys.argv) > 1 else
                   os.path.dirname(os.path.abspath(__file__)) +
                   '/valhalla.json')
actor = valhalla.Actor()
query = '{"locations":[{"lat":52.08813,"lon":5.03231},{"lat":52.09987,"lon":5.14913}],"costing":"bicycle","directions_options":{"language":"ru-RU"}}'
route = json.loads(actor.Route(query))

#sanity check the parsed route structure and the localized instruction
assert ('trip' in route)
assert ('units' in route['trip'] and route['trip']['units'] == 'kilometers')
assert ('summary' in route['trip'] and 'length' in route['trip']['summary']
        and route['trip']['summary']['length'] > 9.)
assert ('legs' in route['trip'] and len(route['trip']['legs']) > 0)
assert ('maneuvers' in route['trip']['legs'][0]
        and len(route['trip']['legs'][0]['maneuvers']) > 0)
assert ('instruction' in route['trip']['legs'][0]['maneuvers'][0])
#the ru-RU instruction translates to "Head east on the cycleway."
assert (route['trip']['legs'][0]['maneuvers'][0]['instruction'] ==
        u'Двигайтесь на восток по велосипедной дорожке.')
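
To exercise this smoke test, pass a config path as the first argument; the asserted route and its Russian instruction assume a tileset that covers the Utrecht area.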
Example #4
    except Exception as e:
      self.answer(400, str(e))

  def do_GET(self):
    self.do(False)
  def do_POST(self):
    self.do(True)


#go off and wait for connections
if __name__ == '__main__':
  #check for a config file
  conf = {}
  try:
    with open(sys.argv[1]) as f:
      conf = json.load(f)
    valhalla.Configure(sys.argv[1])
    #second argument looks like host:port; keep only the last path component
    address = sys.argv[2].split('/')[-1].split(':')
    address[1] = int(address[1])
    address = tuple(address)
    #fail fast if the required environment variables are missing
    os.environ['REDIS_HOST']
    os.environ['DATASTORE_URL']
  except Exception as e:
    sys.stderr.write('Problem with config file: {0}\n'.format(e))
    sys.exit(1)

  #setup the server
  SegmentMatcherHandler.protocol_version = 'HTTP/1.0'
  httpd = ThreadedHTTPServer(address, SegmentMatcherHandler)

  try:
    httpd.serve_forever()
  except KeyboardInterrupt:
    httpd.server_close()
Example #5
def match(file_names, config, mode, report_levels, transition_levels,
          quantisation, inactivity, source, dest_dir):
    #map match every trace in the given csv files and bucket the resulting
    #segments into time tiles under dest_dir
    valhalla.Configure(config)
    segment_matcher = valhalla.SegmentMatcher()
    for file_name in file_names:
        #get out the data into a request payload
        traces = {}
        with open(file_name, 'r') as f:
            for line in f:
                #each row is: uuid, unix time, lat, lon, accuracy
                uuid, tm, lat, lon, acc = line.strip().split(',')
                traces.setdefault(uuid, []).append({
                    'lat': float(lat),
                    'lon': float(lon),
                    'time': int(tm),
                    'accuracy': int(acc)
                })

        #do each trace in this file
        tiles = {}
        for uuid, all_points in traces.items():
            #sort the points by time in case threads were competing on the file
            all_points.sort(key=lambda v: v['time'])
            threshold_sec = 15  #reporting threshold, in seconds

            #find the points where inactivity occurs; these delimit the time windows of a particular vehicle's trace
            windows = []
            for i, point in enumerate(all_points):
                if i == 0 or point['time'] - all_points[
                        i - 1]['time'] > inactivity:
                    windows.append(i)

            #process each window; the last entry is just an end marker
            for idx, i in enumerate(windows):
                j = windows[idx +
                            1] if idx + 1 < len(windows) else len(all_points)
                #skip traces too short to match
                if j - i < 2:
                    continue

                #get the matches for it
                points = all_points[i:j]
                trace = {
                    'uuid': uuid,
                    'trace': points,
                    'match_options': {
                        'mode': mode
                    }
                }
                try:
                    match_str = segment_matcher.Match(
                        json.dumps(trace, separators=(',', ':')))
                    match = json.loads(match_str)
                    report = reporter_service.report(match, trace,
                                                     threshold_sec,
                                                     report_levels,
                                                     transition_levels)
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    logger.error(
                        'Failed to report trace with uuid %s from file %s' %
                        (uuid, file_name))
                    continue

                #weed out the usable segments and then send them off to the time tiles
                buckets = (points[-1]['time'] -
                           points[0]['time']) // quantisation + 1
                segments = [
                    r for r in report['datastore']['reports']
                    if r['t0'] > 0 and r['t1'] > 0 and r['t1'] -
                    r['t0'] > .5 and r['length'] > 0 and r['queue_length'] >= 0
                ]
                for r in segments:
                    duration = int(round(r['t1'] - r['t0']))
                    start = int(math.floor(r['t0']))
                    end = int(math.ceil(r['t1']))
                    min_bucket = start // quantisation
                    max_bucket = end // quantisation
                    diff = max_bucket - min_bucket
                    if diff > buckets:
                        logger.error(
                            'Segment spans %d buckets but should be %d buckets or less for uuid %s in file %s'
                            % (diff, buckets, uuid, file_name))
                        continue
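                    #write one row per time bucket the segment overlaps; tiles are
                    #laid out as <dest_dir>/<bucket_start>_<bucket_end>/<level>/<index>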
                    tile_level = str(get_tile_level(r['id']))
                    tile_index = str(get_tile_index(r['id']))
                    for b in range(min_bucket, max_bucket + 1):
                        tile_file_name = dest_dir + os.sep + str(
                            b * quantisation) + '_' + str(
                                (b + 1) * quantisation -
                                1) + os.sep + tile_level + os.sep + tile_index
                        s = [
                            str(r['id']),
                            str(r.get('next_id', INVALID_SEGMENT_ID)),
                            str(duration), '1',
                            str(r['length']),
                            str(r['queue_length']),
                            str(start),
                            str(end), source,
                            mode.upper()
                        ]
                        tiles.setdefault(tile_file_name,
                                         []).append(','.join(s) + os.linesep)

        #append to a file
        for tile_file_name, tile in tiles.items():
            try:
                os.makedirs(os.sep.join(tile_file_name.split(os.sep)[:-1]))
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                pass  #the directory probably already exists
            serialized = ''.join(tile)
            #size the buffer to the payload so the append lands in one write
            with open(tile_file_name, 'a', len(serialized)) as f:
                f.write(serialized)

        #TODO: return the stats part so we can merge them together later on
        logger.info('Finished matching %d traces in %s' %
                    (len(traces), file_name))
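
A minimal invocation sketch; the values below are illustrative assumptions (the csv rows must be uuid,epoch,lat,lon,accuracy as parsed above):

#illustrative arguments; levels and bucket sizes depend on the deployment
match(file_names=['/tmp/traces/part-0.csv'],
      config='/tmp/valhalla_config.json',
      mode='auto',
      report_levels={0, 1},
      transition_levels={0, 1},
      quantisation=3600,  #one-hour time buckets
      inactivity=720,  #split a trace after 12 minutes of silence
      source='fleet',
      dest_dir='/tmp/time_tiles')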