Esempio n. 1
0
def test_redis_downloader_and_cache():
    """
    Test Redis downloader and cache functions.
    """
    async def fake_downloader(tiles, num_workers):
        # Stand-in downloader: every tile "downloads" the same image.
        make_tile = partial(Tile, error=None, offset=None)
        for item in tiles:
            yield make_tile(url=item.url, img='img')

    async def drain(stream):
        # Collect an async iterator into a plain list.
        return [entry async for entry in stream]

    redis_client = mock.MagicMock()
    # Cache hits for url1/url3; url2 misses and must be downloaded.
    redis_client.get.side_effect = ['c-img1', None, 'c-img3']

    downloader = redis_downloader(
        redis_client, downloader=fake_downloader, timeout=10
    )
    # redis_downloader wraps caching_downloader via functools.partial.
    assert downloader.func == caching_downloader

    input_tiles = [
        Tile(url, None, None, None) for url in ('url1', 'url2', 'url3')
    ]

    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(drain(downloader(input_tiles, 2)))

    # Every URL must have been looked up in the cache, in order.
    fetched = [call[0][0] for call in redis_client.get.call_args_list]
    assert fetched == ['url1', 'url2', 'url3']

    # All tiles (cached and downloaded) are stored back with the timeout.
    stored = sorted(call[0] for call in redis_client.setex.call_args_list)
    assert len(stored) == 3
    assert stored[0] == ('url1', 10, 'c-img1')
    assert stored[1] == ('url2', 10, 'img')
    assert stored[2] == ('url3', 10, 'c-img3')
Esempio n. 2
0
def refresh_map(widget):
    """
    Refresh map when map widget refresh event is set.

    This is asyncio coroutine.

    NOTE(review): written as a plain generator using ``yield from`` with
    no ``@asyncio.coroutine`` decorator — confirm the caller schedules it
    as a coroutine; on modern Python this should be ``async def``/``await``.

    :param widget: Map widget.
    """
    event = widget.refresh_map
    map = widget.map

    # use redis to cache map tiles
    client = redis.Redis('localhost')
    downloader = redis_downloader(client)
    # Bind the cache-aware downloader to the asynchronous map renderer.
    render_map = functools.partial(
        geotiler.render_map_async, downloader=downloader
    )

    while True:
        # Block until the widget requests a refresh, then reset the event.
        yield from event.wait()
        event.clear()

        logger.debug('fetching map image...')
        img = yield from render_map(map)
        logger.debug('got map image')

        # Convert the rendered PIL image to a Qt pixmap and update the
        # widget's map layer, then re-center the view.
        pixmap = QPixmap.fromImage(ImageQt(img))
        widget.map_layer.setPixmap(pixmap)
        scroll_map(widget, map.center)
Esempio n. 3
0
def test_redis_downloader_and_cache():
    """
    Test Redis downloader and cache functions.
    """
    async def images(tiles, num_workers):
        # Stand-in downloader: every tile "downloads" the same image.
        tile = partial(Tile, error=None, offset=None)
        for t in tiles:
            yield tile(url=t.url, img='img')

    async def as_list(tiles):
        # Collect an async iterator into a plain list.
        return [t async for t in tiles]

    client = mock.MagicMock()
    # Cache hits for url1/url3; url2 misses and must be downloaded.
    client.get.side_effect = ['c-img1', None, 'c-img3']
    downloader = redis_downloader(client, downloader=images, timeout=10)
    # redis_downloader wraps caching_downloader via functools.partial.
    assert caching_downloader == downloader.func

    urls = ['url1', 'url2', 'url3']
    tiles = [Tile(url, None, None, None) for url in urls]

    loop = asyncio.get_event_loop()
    tiles = downloader(tiles, 2)
    result = loop.run_until_complete(as_list(tiles))

    # Every URL must have been looked up in the cache, in order.
    args = [v[0][0] for v in client.get.call_args_list]
    assert ['url1', 'url2', 'url3'] == args

    # All tiles (cached and downloaded) are stored back with the timeout.
    # NOTE(review): these assertions use the (key, value, time) setex
    # argument order of redis-py < 3.0; redis-py >= 3.0 expects
    # (key, time, value) — confirm which client version is targeted.
    args = sorted(v[0] for v in client.setex.call_args_list)
    assert 3 == len(args)
    assert ('url1', 'c-img1', 10) == args[0]
    assert ('url2', 'img', 10) == args[1]
    assert ('url3', 'c-img3', 10) == args[2]
Esempio n. 4
0
    def test_redis_downloader(self):
        """
        Test creating Redis downloader.
        """
        # ``@asyncio.coroutine`` was deprecated in Python 3.8 and removed
        # in 3.11 — use a native coroutine instead (behavior unchanged).
        async def images(urls):
            # Stand-in downloader: return one fake image per URL.
            return 'img1', 'img2', 'img3'

        client = mock.MagicMock()
        client.get.side_effect = ['img1', 'img2', 'img3']
        downloader = redis_downloader(client, downloader=images, timeout=10)
        # redis_downloader wraps caching_downloader via functools.partial.
        self.assertEqual(caching_downloader, downloader.func)

        task = downloader(['url1', 'url2', 'url3'])
        loop = asyncio.get_event_loop()
        result = loop.run_until_complete(task)

        # Every URL must have been looked up in the cache, in order.
        args = [v[0][0] for v in client.get.call_args_list]
        self.assertEqual(['url1', 'url2', 'url3'], args)

        # Every image is stored back with the configured timeout, using
        # the (key, value, time) setex signature of redis-py < 3.0.
        args = sorted(v[0] for v in client.setex.call_args_list)
        self.assertEqual(3, len(args))
        self.assertEqual(('url1', 'img1', 10), args[0])
        self.assertEqual(('url2', 'img2', 10), args[1])
        self.assertEqual(('url3', 'img3', 10), args[2])
Esempio n. 5
0
async def refresh_map(widget):
    """
    Refresh map when map widget refresh event is set.

    This is asyncio coroutine.

    :param widget: Map widget.
    """
    event = widget.refresh_map
    map = widget.map

    # use redis to cache map tiles
    client = redis.Redis('localhost')
    downloader = redis_downloader(client)
    # Bind the cache-aware downloader to the renderer and tile fetcher.
    render_map = functools.partial(
        geotiler.render_map_async, downloader=downloader
    )
    fetch_tiles = functools.partial(
        geotiler.fetch_tiles, downloader=downloader
    )

    # Reuse a single pixmap of the map's size across all refreshes.
    pixmap = QPixmap(*map.size)

    while True:
        # Block until the widget requests a refresh, then reset the event.
        await event.wait()
        event.clear()

        logger.debug('fetching map image...')

        img = await render_map(map)
        pixmap.convertFromImage(ImageQt(img))

        # TODO: use `fetch_tiles` to update map as tiles arrive, but try to
        # avoid `setPixmap` within the loop.
        # tiles = fetch_tiles(map)
        # async for tile in tiles:
        #     painter = QPainter(pixmap)
        #     img = QImage()
        #     img.loadFromData(tile.img)
        #     painter.drawImage(*tile.offset, img)
        #     painter.end()
        #     widget.map_layer.setPixmap(pixmap)

        # Re-center the view before swapping in the freshly rendered map.
        scroll_map(widget, map.center)
        widget.map_layer.setPixmap(pixmap)

        logger.debug('got map image')
Esempio n. 6
0
async def refresh_map(widget):
    """
    Refresh map when map widget refresh event is set.

    This is asyncio coroutine.

    :param widget: Map widget.
    """
    event = widget.refresh_map
    map = widget.map

    # use redis to cache map tiles
    client = redis.Redis('localhost')
    downloader = redis_downloader(client)
    # Bind the cache-aware downloader to the renderer and tile fetcher.
    render_map = functools.partial(
        geotiler.render_map_async, downloader=downloader
    )
    fetch_tiles = functools.partial(
        geotiler.fetch_tiles, downloader=downloader
    )

    # Reuse a single pixmap of the map's size across all refreshes.
    pixmap = QPixmap(*map.size)

    while True:
        # Block until the widget requests a refresh, then reset the event.
        await event.wait()
        event.clear()

        logger.debug('fetching map image...')

        img = await render_map(map)
        pixmap.convertFromImage(ImageQt(img))

        # TODO: use `fetch_tiles` to update map as tiles arrive, but try to
        # avoid `setPixmap` within the loop.
        # tiles = fetch_tiles(map)
        # async for tile in tiles:
        #     painter = QPainter(pixmap)
        #     img = QImage()
        #     img.loadFromData(tile.img)
        #     painter.drawImage(*tile.offset, img)
        #     painter.end()
        #     widget.map_layer.setPixmap(pixmap)

        # Re-center the view before swapping in the freshly rendered map.
        scroll_map(widget, map.center)
        widget.map_layer.setPixmap(pixmap)

        logger.debug('got map image')
Esempio n. 7
0
Requires running `gpsd` daemon.
"""

import asyncio
import functools
import logging
import json
import redis

logging.basicConfig(level=logging.DEBUG)

import geotiler
from geotiler.cache import redis_downloader

# Cache map tiles in a local Redis instance and bind the cache-aware
# downloader to the asynchronous map renderer.
client = redis.Redis('localhost')
downloader = redis_downloader(client)
render_map_async = functools.partial(geotiler.render_map_async,
                                     downloader=downloader)


async def read_gps(queue):
    """
    Read location data from `gpsd` daemon.

    Connects to the daemon on its default port, enables JSON watch mode
    and pushes each (longitude, latitude) pair onto the queue.
    """
    reader, writer = await asyncio.open_connection(port=2947)
    # Ask gpsd to stream position reports as JSON documents.
    writer.write(b'?WATCH={"enable":true,"json":true}\n')
    while True:
        raw = await reader.readline()
        report = json.loads(raw.decode())
        if 'lon' not in report:
            # Skip status messages that carry no position fix.
            continue
        await queue.put((report['lon'], report['lat']))
Esempio n. 8
0
"""
GeoTiler example to use Redis based cache for map tiles.
"""

import functools
import redis

import geotiler
from geotiler.cache import redis_downloader

import logging
logging.basicConfig(level=logging.DEBUG)

# create tile downloader with Redis client as cache
client = redis.Redis('localhost')
downloader = redis_downloader(client)

# use map renderer with new downloader
render_map = functools.partial(geotiler.render_map, downloader=downloader)

bbox = 11.78560, 46.48083, 11.79067, 46.48283
mm = geotiler.Map(extent=bbox, zoom=18)

# render the map for the first time...
img = render_map(mm)

# ... and second time to demonstrate use of the cache
img = render_map(mm)

# show some recent keys
print('recent cache keys {}'.format(client.keys()[:10]))
Esempio n. 9
0
def add_reference_files(config,
                        checkpoint,
                        reference_database,
                        in_files,
                        dataset_name,
                        default_route_type,
                        extract_route_type,
                        expand_paths,
                        skip_existing=False):
    """
    Add GPX reference files to the reference database and generate
    segment images for each stored route.

    :param config: Configuration dictionary (``redis``, ``map_options``
        and ``apply`` keys are used).
    :param checkpoint: Path of the Keras embedding model checkpoint.
    :param reference_database: Reference database specification.
    :param in_files: GPX input files to add.
    :param dataset_name: Dataset name stored with each route entry.
    :param default_route_type: Route type used when none can be extracted.
    :param extract_route_type: If true, derive the route type from the
        file name (``..._<type>.<ext>``).
    :param expand_paths: If true, store absolute file paths.
    :param skip_existing: If true, skip files when routes already exist.
    """
    engine, Routes, OSMImages = get_engine_and_model(reference_database,
                                                     train=False)
    Session = sessionmaker(bind=engine)
    session = Session()
    # Map tiles are cached in Redis to avoid re-downloading them.
    client = redis.Redis(**config['redis'])
    downloader = redis_downloader(client)
    render_map = functools.partial(geotiler.render_map, downloader=downloader)

    click.echo(f'Loading model from: {checkpoint}!')
    embedding_model = tf.keras.models.load_model(checkpoint)

    in_files_prepared = []

    def show_item_gpx(item):
        # Progress-bar label for the GPX import phase.
        if item is not None:
            return f'{item}'
        else:
            return ''

    click.echo('Adding GPX Files to Database:')
    with click.progressbar(in_files,
                           item_show_func=show_item_gpx,
                           show_pos=True) as bar:
        for p in bar:
            p = pathlib.Path(p)
            if expand_paths:
                p = p.absolute()
            if extract_route_type:
                # Route type is encoded as ``..._<type>.<ext>``.
                try:
                    route_type = p.name.split('_')[-1].split('.')[0]
                except (IndexError, AttributeError):
                    # Narrowed from a bare ``except:`` so that
                    # KeyboardInterrupt/SystemExit are not swallowed.
                    route_type = default_route_type
                else:
                    if route_type == '':
                        route_type = default_route_type
            else:
                route_type = default_route_type
            if skip_existing:
                # NOTE(review): this skips as soon as *any* route exists,
                # not only when this file's route exists — confirm the
                # count should not be filtered by ``Routes.path``.
                if session.query(Routes).count() > 0:
                    continue
            else:
                # Replace any previously imported entry for this path.
                delete_q = Routes.__table__.delete().where(
                    Routes.path == str(p))
                session.execute(delete_q)
                session.commit()
            route_entry = Routes(path=str(p),
                                 dataset=dataset_name,
                                 route_type=route_type,
                                 gpx_file=compress_gpx(p))
            session.add(route_entry)
            session.commit()
            in_files_prepared.append((p, route_entry.id))

    def show_item_image(item):
        # Progress-bar label for the image generation phase.
        if item is not None:
            return '{} [type: {}]'.format(str(item[0]), item[1])
        else:
            return ''

    click.echo('Generating segement images:')
    with click.progressbar(in_files_prepared,
                           item_show_func=show_item_image,
                           show_pos=True) as bar:
        for (path, route_entry_id) in bar:
            create_images_reference(dbsession=session,
                                    db_model=OSMImages,
                                    config=config,
                                    embedding_model=embedding_model,
                                    gpx_file=path,
                                    route_id=route_entry_id,
                                    batch_size=config.get('apply').get(
                                        'batch_size', 16),
                                    render_map=render_map)
Esempio n. 10
0
def add_train_files(config,
                    in_files,
                    dataset_name,
                    default_route_type,
                    extract_route_type,
                    expand_paths,
                    skip_existing=False):
    """
    Generate training images for a set of GPX files and store them in
    the training database.

    :param config: Configuration dictionary (``postgres``, ``redis``,
        ``map_options`` and ``train`` keys are used).
    :param in_files: GPX input files to process.
    :param dataset_name: Dataset name stored with each image.
    :param default_route_type: Route type used when none can be extracted.
    :param extract_route_type: If true, derive the route type from the
        file name (``..._<type>.<ext>``).
    :param expand_paths: If true, use absolute file paths.
    :param skip_existing: If true, skip files whose images already exist
        with the same map options.
    """
    engine, OSMImages = get_engine_and_model(**config['postgres'])
    Session = sessionmaker(bind=engine)
    session = Session()

    # Map tiles are cached in Redis to avoid re-downloading them.
    client = redis.Redis(**config['redis'])
    downloader = redis_downloader(client)
    render_map = functools.partial(geotiler.render_map, downloader=downloader)

    in_files_prepared = []
    for p in in_files:
        p = pathlib.Path(p)
        if expand_paths:
            p = p.absolute()
        if extract_route_type:
            # Route type is encoded as ``..._<type>.<ext>``.
            try:
                route_type = p.name.split('_')[-1].split('.')[0]
            except (IndexError, AttributeError):
                # Narrowed from a bare ``except:`` so that
                # KeyboardInterrupt/SystemExit are not swallowed.
                route_type = default_route_type
            else:
                if route_type == '':
                    route_type = default_route_type
        else:
            route_type = default_route_type
        in_files_prepared.append((p, route_type))

    def show_item(item):
        # Progress-bar label: path plus extracted route type.
        if item is not None:
            return '{} [type: {}]'.format(str(item[0]), item[1])
        else:
            return ''

    opts = config['map_options']
    with click.progressbar(in_files_prepared,
                           item_show_func=show_item,
                           show_pos=True) as bar:
        for (path, route_type) in bar:
            if skip_existing:
                # Bug fix: the conditions were chained with Python ``and``,
                # which reduces the chain to its *last* expression, so only
                # ``show_route`` was actually filtered in SQL. Passing the
                # criteria as separate ``filter`` arguments ANDs them all.
                count = session.query(OSMImages.id).filter(
                    OSMImages.origin == str(path),
                    OSMImages.width == opts['width'],
                    OSMImages.height == opts['height'],
                    OSMImages.zoom == opts['zoom'],
                    OSMImages.show_route == opts['show_route']).count()
                if count > 0:
                    continue
            # NOTE(review): ``size`` uses ``width`` twice — presumably
            # square training images are intended; confirm ``height`` is
            # not meant here.
            create_images_train(session,
                                OSMImages,
                                path,
                                route_type=route_type,
                                render_map=render_map,
                                show_route=bool(opts['show_route']),
                                zoom=opts['zoom'],
                                size=(opts['width'], opts['width']),
                                max_distance=opts['smoothing_dist'],
                                save_type=config['train']['save_type'],
                                dataset=dataset_name)
Esempio n. 11
0
def apply_model_to_file(config, gpx_file, ref_database, checkpoint):
    """
    Apply the embedding model to a GPX file and find the most similar
    routes in a reference database.

    :param config: Configuration dictionary (``redis``, ``map_options``
        and ``apply`` keys are used).
    :param gpx_file: GPX file to analyse.
    :param ref_database: Reference database specification.
    :param checkpoint: Path of the Keras embedding model checkpoint.

    :return: Tuple ``(fig_test, fig_sim, segments, segments_sim,
        segments_sim_global, matrix_sim)``; ``None`` when the GPX file
        cannot be parsed.
    """
    gpx_file = pathlib.Path(gpx_file)
    click.echo(f'Loading model from: {checkpoint}!')
    embedding_model = tf.keras.models.load_model(checkpoint)
    # Map tiles are cached in Redis to avoid re-downloading them.
    client = redis.Redis(**config['redis'])
    downloader = redis_downloader(client)
    render_map = functools.partial(geotiler.render_map, downloader=downloader)
    batch_size = config.get('apply').get('batch_size', 16)

    map_options = config['map_options']
    map_options['size'] = (map_options['width'], map_options['height'])
    click.echo(f'Generating segment images for the test gpx: {gpx_file}...')
    try:
        gpx = gpxpy.parse(gpx_file.open())
    except gpxpy.gpx.GPXXMLSyntaxException:
        click.echo(f'{gpx_file} is not a valid GPX file!')
        return
    segments = []
    for track_idx, track in enumerate(gpx.tracks):
        for segment_idx, segment in enumerate(track.segments):
            images, entries = [], []
            for img, info in generate_images_for_segment(
                    segment=segment,
                    size=map_options['size'],
                    zoom=map_options['zoom'],
                    max_distance=map_options['smoothing_dist'],
                    render_map=render_map,
                    show_route=map_options['show_route']):
                entry = {
                    'origin': gpx_file,
                    'track_idx': track_idx,
                    'segment_idx': segment_idx,
                    'image_raw': img
                }
                entry = {**entry, **info}
                # Rebuild the segment's map to obtain its center point.
                mm = geotiler.Map(extent=(info['p_0_long'], info['p_0_lat'],
                                          info['p_1_long'], info['p_1_lat']),
                                  zoom=map_options['zoom'])
                entry['point'] = GPXTrackPoint(latitude=mm.center[1],
                                               longitude=mm.center[0])
                img_embedding = tf.keras.preprocessing.image.img_to_array(
                    img) / 255.
                images.append(img_embedding)
                entries.append(entry)
                if len(images) == batch_size:
                    reconstructed_images, mu, log_var = apply_model(
                        embedding_model, images, batch_size, fill_up=False)
                    # NOTE(review): ``zip(log_var, mu)`` binds ``mu_i`` to
                    # log_var values and vice versa — the names look
                    # crossed. The same ordering is used for the tail
                    # batch below; confirm it is intended.
                    images = [
                        np.array((mu_i, log_var_i))
                        for mu_i, log_var_i in zip(log_var, mu)
                    ]
                    for i, (img_encoded, img_decoded, entry) in enumerate(
                            zip(images, reconstructed_images, entries)):
                        if i == 0:
                            map_options['encoding_shape'] = img_encoded.shape
                        entry['image_encoded'] = img_encoded
                        entry['image_decoded'] = img_decoded.numpy()
                        segments.append(entry)
                    images, entries = [], []
            if len(entries) > 0:
                # Process the last, partially filled batch of the segment.
                reconstructed_images, mu, log_var = apply_model(
                    embedding_model, images, batch_size, fill_up=False)
                images = [
                    np.array((mu_i, log_var_i))
                    for mu_i, log_var_i in zip(log_var, mu)
                ]
                for img_encoded, img_decoded, entry in zip(
                        images, reconstructed_images, entries):
                    entry['image_encoded'] = img_encoded
                    entry['image_decoded'] = img_decoded.numpy()
                    segments.append(entry)
    test_images = np.asarray([entry['image_encoded'] for entry in segments])
    click.echo(f'Loading date from reference database: {ref_database}...')
    engine, Routes, Segments = get_engine_and_model(ref_database, train=False)
    Session = sessionmaker(bind=engine)
    session = Session()
    images = []
    route_ids = []
    segment_ids = []
    # Load every reference segment encoding along with its origin route.
    for seg in session.query(Segments.image, Segments.origin, Segments.id):
        images.append(
            bytes2array(seg.image).reshape(map_options['encoding_shape']))
        route_ids.append(seg.origin)
        segment_ids.append(seg.id)
    images = np.asarray(images)
    route_ids = np.asarray(route_ids)
    segment_ids = np.asarray(segment_ids)
    click.echo(f'Calculating Bhattacharyya distance...')

    def bhattacharyya_distance(vec_a, vec_b):
        # Each vector stores the mean in its first half and the
        # log-variance in its second half (exp() recovers the variance).
        mu_a, var_a = vec_a[:len(vec_a) // 2], np.exp(vec_a[len(vec_a) // 2:])
        mu_b, var_b = vec_b[:len(vec_b) // 2], np.exp(vec_b[len(vec_b) // 2:])
        result = 0.25 * np.log(0.25 *
                               (var_a / var_b + var_b / var_a + 2)) + 0.25 * (
                                   (mu_a - mu_b)**2 / (var_a + var_b))
        return np.mean(result)

    # Flatten each encoding to a vector for pairwise distance computation.
    test_images_reshaped = test_images.reshape(
        test_images.shape[0], test_images.shape[1] * test_images.shape[2])
    images_reshaped = images.reshape(images.shape[0],
                                     images.shape[1] * images.shape[2])
    matrix = cdist(test_images_reshaped,
                   images_reshaped,
                   metric=bhattacharyya_distance)
    click.echo(
        f'Determine best matching route via aggregation of segments with `{config["apply"]["aggregation"]}`...'
    )
    segments_sim, matrix_sim, compressed_gpx_file = generate_segements_sim(
        config, session, Segments, Routes, segment_ids, route_ids, matrix)
    click.echo(f'Preparing images of similar segments...')
    segments_sim_global = generate_img_sim_global(config, session, Segments,
                                                  matrix, segment_ids,
                                                  embedding_model, render_map)

    fig_test = create_plotly_fig_with_line(config, gpx)
    fig_sim = create_plotly_fig_with_line(
        config, gpxpy.parse(decompress_gpx(compressed_gpx_file)))
    session.close()
    return fig_test, fig_sim, segments, segments_sim, segments_sim_global, matrix_sim