def test_files(self, session):
    """End-to-end check of the datamap export / encode / merge / render chain."""
    today = util.utcnow().date()
    points = [
        (12.345, 12.345),
        (0, 12.345),
        (-10.000, -11.000),
    ]
    # Seed one DataMap row per point, sharded by its scaled grid coordinates.
    for raw_lat, raw_lon in points:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        model = DataMap.shard_model(grid_lat, grid_lon)
        session.add(model(grid=(grid_lat, grid_lon), created=today, modified=today))
    session.flush()

    csv_lines = []
    row_count = 0
    with util.selfdestruct_tempdir() as temp_dir:
        quaddir = os.path.join(temp_dir, "quadtrees")
        os.mkdir(quaddir)
        shapes = os.path.join(temp_dir, "shapes")
        tiles = os.path.join(temp_dir, "tiles")

        for shard_id, shard in DataMap.shards().items():
            filename = "map_%s.csv.gz" % shard_id
            filepath = os.path.join(temp_dir, filename)
            exported = export_file(filepath, shard.__tablename__, _session=session)
            if not exported:
                # An empty shard must not leave a file behind.
                assert not os.path.isfile(filepath)
                continue
            row_count += exported
            with util.gzip_open(filepath, "r") as fd:
                contents = fd.read()
            csv_lines.extend(entry.split(",") for entry in contents.split())
            encode_file(filename, temp_dir, quaddir)
            quadfolder = os.path.join(quaddir, "map_" + shard_id)
            assert os.path.isdir(quadfolder)
            self._check_quadtree(quadfolder)

        merge_files(quaddir, shapes)
        self._check_quadtree(shapes)
        render_tiles(shapes, tiles, 1, 2)
        assert sorted(os.listdir(tiles)) == ["0", "1", "2"]
        assert sorted(os.listdir(os.path.join(tiles, "0", "0"))) == [
            "0.png",
            "*****@*****.**",
        ]

    assert row_count == 18
    assert len(csv_lines) == 18
    seen_lats = [round(float(entry[0]), 2) for entry in csv_lines]
    seen_lons = [round(float(entry[1]), 2) for entry in csv_lines]
    assert set(seen_lats) == {-10.0, 0.0, 12.35}
    assert set(seen_lons) == {-11.0, 12.35}
def test_files(self, db, session):  # pragma: no cover
    """Exercise the full datamap pipeline: export, quadtree encode, merge, render."""
    today = util.utcnow().date()
    # Three seed points spanning positive, zero and negative coordinates.
    seeds = [
        (12.345, 12.345),
        (0, 12.345),
        (-10.000, -11.000),
    ]
    for raw_lat, raw_lon in seeds:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        shard = DataMap.shard_model(grid_lat, grid_lon)
        session.add(
            shard(grid=(grid_lat, grid_lon), created=today, modified=today)
        )
    session.flush()

    parsed_lines = []
    exported_rows = 0
    with util.selfdestruct_tempdir() as temp_dir:
        quaddir = os.path.join(temp_dir, 'quadtrees')
        os.mkdir(quaddir)
        shapes = os.path.join(temp_dir, 'shapes')
        tiles = os.path.join(temp_dir, 'tiles')

        for shard_id, shard in DataMap.shards().items():
            filename = 'map_%s.csv.gz' % shard_id
            filepath = os.path.join(temp_dir, filename)
            result = export_file(filepath, shard.__tablename__, _session=session)
            if not result:
                # Shards without data should produce no output file.
                assert not os.path.isfile(filepath)
                continue
            exported_rows += result
            with util.gzip_open(filepath, 'r') as fd:
                written = fd.read()
            parsed_lines.extend(chunk.split(',') for chunk in written.split())
            encode_file(filename, temp_dir, quaddir)
            quadfolder = os.path.join(quaddir, 'map_' + shard_id)
            assert os.path.isdir(quadfolder)
            self._check_quadtree(quadfolder)

        merge_files(quaddir, shapes)
        self._check_quadtree(shapes)
        render_tiles(shapes, tiles, 1, 2)
        assert sorted(os.listdir(tiles)) == ['0', '1', '2']
        zoom_zero = sorted(os.listdir(os.path.join(tiles, '0', '0')))
        assert zoom_zero == ['0.png', '*****@*****.**']

    assert exported_rows == 18
    assert len(parsed_lines) == 18
    assert set(round(float(l[0]), 2) for l in parsed_lines) == set([-10.0, 0.0, 12.35])
    assert set(round(float(l[1]), 2) for l in parsed_lines) == set([-11.0, 12.35])
def test_files(self, db_rw, session):
    """Verify export, quadtree encoding, merging and tile rendering end to end."""
    today = util.utcnow().date()
    coords = [
        (12.345, 12.345),
        (0, 12.345),
        (-10.000, -11.000),
    ]
    # Insert a DataMap grid row for each coordinate pair.
    for raw_lat, raw_lon in coords:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        model = DataMap.shard_model(grid_lat, grid_lon)
        session.add(
            model(grid=(grid_lat, grid_lon), created=today, modified=today))
    session.flush()

    parsed = []
    total_rows = 0
    db_url = str(db_rw.engine.url)
    with util.selfdestruct_tempdir() as temp_dir:
        quaddir = os.path.join(temp_dir, 'quadtrees')
        os.mkdir(quaddir)
        shapes = os.path.join(temp_dir, 'shapes')
        tiles = os.path.join(temp_dir, 'tiles')

        for shard_id, shard in DataMap.shards().items():
            filename = 'map_%s.csv.gz' % shard_id
            filepath = os.path.join(temp_dir, filename)
            result = export_file(
                db_url, filepath, shard.__tablename__, _session=session)
            if not result:
                # No rows exported -> no file may exist.
                assert not os.path.isfile(filepath)
                continue
            total_rows += result
            with util.gzip_open(filepath, 'r') as fd:
                written = fd.read()
            parsed.extend(piece.split(',') for piece in written.split())
            encode_file(filename, temp_dir, quaddir, DATAMAPS_DIR)
            quadfolder = os.path.join(quaddir, 'map_' + shard_id)
            assert os.path.isdir(quadfolder)
            self._check_quadtree(quadfolder)

        merge_files(quaddir, shapes, DATAMAPS_DIR)
        self._check_quadtree(shapes)
        render_tiles(shapes, tiles, 1, 2, DATAMAPS_DIR, PNGQUANT)
        assert sorted(os.listdir(tiles)) == ['0', '1', '2']
        base_tiles = sorted(os.listdir(os.path.join(tiles, '0', '0')))
        assert base_tiles == ['0.png', '*****@*****.**']

    assert total_rows == 36
    assert len(parsed) == 36
    assert set(round(float(l[0]), 2) for l in parsed) == set([-10.0, 0.0, 12.35])
    assert set(round(float(l[1]), 2) for l in parsed) == set([-11.0, 12.35])
def _add(self, entries):
    """Insert one DataMap grid row per (lat, lon, time) entry, then flush."""
    for raw_lat, raw_lon, when in entries:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        model = DataMap.shard_model(grid_lat, grid_lon)
        row = model(grid=(grid_lat, grid_lon), created=when, modified=when)
        self.session.add(row)
    self.session.flush()
def _queue(self, pairs):
    """Group the scaled grid positions by shard and enqueue the encoded grids."""
    per_shard = defaultdict(list)
    for raw_lat, raw_lon in pairs:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        shard_id = DataMap.shard_id(grid_lat, grid_lon)
        per_shard[shard_id].append(encode_datamap_grid(grid_lat, grid_lon))
    for shard_id, encoded in per_shard.items():
        data_queue = self.celery_app.data_queues['update_datamap_' + shard_id]
        data_queue.enqueue(list(encoded))
def _queue(self, pairs):
    """Bucket encoded grid values per shard and enqueue them (non-JSON payload)."""
    buckets = defaultdict(list)
    for raw_lat, raw_lon in pairs:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        buckets[DataMap.shard_id(grid_lat, grid_lon)].append(
            encode_datamap_grid(grid_lat, grid_lon))
    for shard_id, encoded in buckets.items():
        queue = self.celery_app.data_queues['update_datamap_' + shard_id]
        queue.enqueue(list(encoded), json=False)
def test_one(self, celery, session):
    """A single queued observation ends up as exactly one grid row."""
    lat = 1.234567
    lon = 2.345678
    shard_id = DataMap.shard_id(*DataMap.scale(lat, lon))
    self._queue(celery, [(lat, lon)])
    update_datamap.delay(shard_id=shard_id).get()

    grids = session.query(DataMap.shards()[shard_id]).all()
    assert len(grids) == 1
    grid = grids[0]
    self._check_position(grid, 1.235, 2.346)
    assert grid.created == self.today
    assert grid.modified == self.today
def test_one(self):
    """Queueing one observation creates a single grid row in the right shard."""
    lat = 1.234567
    lon = 2.345678
    shard_id = DataMap.shard_id(*DataMap.scale(lat, lon))
    self._queue([(lat, lon)])
    update_datamap.delay(shard_id=shard_id).get()

    grids = self.session.query(DataMap.shards()[shard_id]).all()
    self.assertEqual(len(grids), 1)
    first = grids[0]
    self._check_position(first, 1.235, 2.346)
    self.assertEqual(first.created, self.today)
    self.assertEqual(first.modified, self.today)
def test_files(self):
    """Run the whole datamap file pipeline and verify counts and coordinates."""
    today = util.utcnow().date()
    seeds = [
        (12.345, 12.345),
        (0, 12.345),
        (-10.000, -11.000),
    ]
    for raw_lat, raw_lon in seeds:
        grid_lat, grid_lon = DataMap.scale(raw_lat, raw_lon)
        model = DataMap.shard_model(grid_lat, grid_lon)
        self.session.add(
            model(grid=(grid_lat, grid_lon), created=today, modified=today))
    self.session.flush()

    parsed_lines = []
    exported = 0
    with util.selfdestruct_tempdir() as temp_dir:
        quaddir = os.path.join(temp_dir, "quadtrees")
        os.mkdir(quaddir)
        shapes = os.path.join(temp_dir, "shapes")
        tiles = os.path.join(temp_dir, "tiles")

        for shard_id, shard in DATAMAP_SHARDS.items():
            filename = "map_%s.csv.gz" % shard_id
            filepath = os.path.join(temp_dir, filename)
            result = export_file(None, filepath, shard.__tablename__,
                                 _db_rw=_make_db(), _session=self.session)
            if not result:
                # Empty shard: the export must not create a file.
                self.assertFalse(os.path.isfile(filepath))
                continue
            exported += result
            with util.gzip_open(filepath, "r") as fd:
                written = fd.read()
            parsed_lines.extend(chunk.split(",") for chunk in written.split())
            encode_file(filename, temp_dir, quaddir, DATAMAPS_DIR)
            quadfolder = os.path.join(quaddir, "map_" + shard_id)
            self.assertTrue(os.path.isdir(quadfolder))
            self._check_quadtree(quadfolder)

        merge_files(quaddir, shapes, DATAMAPS_DIR)
        self._check_quadtree(shapes)
        render_tiles(shapes, tiles, 1, 2, DATAMAPS_DIR, PNGQUANT)
        self.assertEqual(sorted(os.listdir(tiles)), ["0", "1", "2"])
        self.assertEqual(
            sorted(os.listdir(os.path.join(tiles, "0", "0"))),
            ["0.png", "*****@*****.**"])

    self.assertEqual(exported, 36)
    self.assertEqual(len(parsed_lines), 36)
    self.assertEqual(
        set(round(float(l[0]), 2) for l in parsed_lines),
        set([-10.0, 0.0, 12.35]))
    self.assertEqual(
        set(round(float(l[1]), 2) for l in parsed_lines),
        set([-11.0, 12.35]))
def test_update(self):
    """Re-observing an existing grid keeps created but bumps modified."""
    lat = 1.0
    lon = 2.0
    shard_id = DataMap.shard_id(*DataMap.scale(lat, lon))
    # Pre-existing row from yesterday, then a fresh observation today.
    self._add([(lat, lon, self.yesterday)])
    self._queue([(lat, lon)])
    update_datamap.delay(shard_id=shard_id).get()

    grids = self.session.query(DataMap.shards()[shard_id]).all()
    self.assertEqual(len(grids), 1)
    row = grids[0]
    self._check_position(row, 1.0, 2.0)
    self.assertEqual(row.created, self.yesterday)
    self.assertEqual(row.modified, self.today)
def test_update(self, celery, session):
    """Updating an existing grid preserves created and refreshes modified."""
    lat = 1.0
    lon = 2.0
    shard_id = DataMap.shard_id(*DataMap.scale(lat, lon))
    # Seed yesterday's row, then queue the same position again today.
    self._add(session, [(lat, lon, self.yesterday)])
    self._queue(celery, [(lat, lon)])
    update_datamap.delay(shard_id=shard_id).get()

    grids = session.query(DataMap.shards()[shard_id]).all()
    assert len(grids) == 1
    row = grids[0]
    self._check_position(row, 1.0, 2.0)
    assert row.created == self.yesterday
    assert row.modified == self.today
def _check_position(self, stat, lat, lon):
    """Assert that *stat* sits on the scaled grid cell for (lat, lon)."""
    expected = DataMap.scale(lat, lon)
    self.assertEqual(stat.grid, expected)
def _one(self, lat, lon, time):
    """Build (without persisting) a DataMap row for the scaled coordinates."""
    grid_lat, grid_lon = DataMap.scale(lat, lon)
    model = DataMap.shard_model(grid_lat, grid_lon)
    return model(grid=(grid_lat, grid_lon), created=time, modified=time)
def _check_position(self, stat, lat, lon):
    """Check that *stat* occupies the grid cell obtained by scaling (lat, lon)."""
    expected = DataMap.scale(lat, lon)
    assert stat.grid == expected
def test_scale(self):
    """Scaling rounds coordinates to the integer grid (thousandths)."""
    scaled = DataMap.scale(-1.12345678, 2.23456789)
    self.assertEqual(scaled, (-1123, 2235))
def test_scale(self):
    """Coordinates are scaled and rounded onto the integer grid."""
    result = DataMap.scale(-1.12345678, 2.23456789)
    assert result == (-1123, 2235)
def _one(self, lat, lon, time):
    """Return a new, unsaved DataMap row for the scaled (lat, lon) position."""
    scaled_lat, scaled_lon = DataMap.scale(lat, lon)
    shard = DataMap.shard_model(scaled_lat, scaled_lon)
    return shard(
        grid=(scaled_lat, scaled_lon),
        created=time,
        modified=time,
    )
def __init__(self, lat, lon):
    """Store the encoded grid cell for (lat, lon) and start the counter at zero."""
    scaled = DataMap.scale(lat, lon)
    self.grid = encode_datamap_grid(*scaled)
    self.num = 0
def _add(self, entries):
    """Persist a DataMap grid row for each (lat, lon, time) entry and flush."""
    for raw_lat, raw_lon, timestamp in entries:
        scaled_lat, scaled_lon = DataMap.scale(raw_lat, raw_lon)
        shard = DataMap.shard_model(scaled_lat, scaled_lon)
        self.session.add(
            shard(
                grid=(scaled_lat, scaled_lon),
                created=timestamp,
                modified=timestamp,
            )
        )
    self.session.flush()