def test_stable(self):
    """Identical inputs must yield identical points; a shifted grid must differ."""
    same_a = random_points(10123, -170234, 1)
    same_b = random_points(10123, -170234, 1)
    shifted = random_points(10124, -170234, 1)
    self.assertEqual(same_a, same_b)
    self.assertNotEqual(same_a, shifted)
    self.assertNotEqual(same_b, shifted)
def test_stable(self):
    """random_points is deterministic in its grid coordinates."""
    first = random_points(10123, -170234, 1)
    second = random_points(10123, -170234, 1)
    other = random_points(10124, -170234, 1)
    assert first == second
    assert first != other
    assert second != other
def test_num(self):
    """The point count steps down as the `num` (age bucket) argument grows."""
    cases = [
        (20, 0), (12, 0), (11, 1), (10, 1), (9, 2),
        (2, 5), (1, 6), (0, 6), (-1, 6), (-10, 6),
    ]
    for num, count in cases:
        assert len(random_points(1, -2, num)) == count
def test_num(self):
    """Point count grows as the `num` (age bucket) argument shrinks."""
    expectations = (
        (20, 2), (6, 2), (5, 2), (4, 4),
        (1, 10), (0, 12), (-1, 12), (-10, 12),
    )
    for num, count in expectations:
        self.assertEqual(len(random_points(1, -2, num)), count)
def test_num(self):
    """Map each `num` age bucket to its expected point count."""
    counts = {20: 2, 6: 2, 5: 2, 4: 4, 1: 10, 0: 12, -1: 12, -10: 12}
    for num, count in counts.items():
        assert len(random_points(1, -2, num)) == count
def export_file(filename, tablename, _db=None, _session=None):
    """Export the last year's datamap grid rows into a gzipped points file.

    Runs inside a worker process.

    :param filename: Path of the gzip output file.
    :param tablename: Name of the datamap table to read from.
    :param _db: Testing hook, pre-configured database wrapper.
    :param _session: Testing hook, pre-configured database session.
    :returns: Number of output lines written; the (empty) output file is
        removed and 0 returned when no rows matched.
    """
    today = util.utcnow().date()
    one_year_ago = today - timedelta(days=365)
    one_year_ago = one_year_ago.strftime('%Y-%m-%d')
    # this is executed in a worker process
    # Keyset pagination on `grid` instead of LIMIT/OFFSET: OFFSET makes
    # the server rescan and discard all prior rows on every page (O(n^2)
    # over the table) and can skip or duplicate rows when the table
    # changes between pages.
    stmt = text('''\
SELECT
`grid`, CAST(ROUND(DATEDIFF(CURDATE(), `modified`) / 30) AS UNSIGNED) as `num`
FROM {tablename}
WHERE modified >= '{modified}' AND `grid` > :grid
ORDER BY `grid`
LIMIT :limit
'''.format(tablename=tablename, modified=one_year_ago).replace('\n', ' '))

    db = configure_db('ro', _db=_db)
    min_grid = b''
    limit = 200000
    result_rows = 0
    with util.gzip_open(filename, 'w', compresslevel=2) as fd:
        with db_worker_session(db, commit=False) as session:
            if _session is not None:
                # testing hook
                session = _session
            while True:
                result = session.execute(
                    stmt.bindparams(limit=limit, grid=min_grid))
                rows = result.fetchall()
                result.close()
                if not rows:
                    break
                lines = []
                extend = lines.extend
                for row in rows:
                    lat, lon = decode_datamap_grid(row.grid)
                    extend(random_points(lat, lon, row.num))
                fd.writelines(lines)
                result_rows += len(lines)
                # advance the keyset cursor past the last row seen
                min_grid = rows[-1].grid

    if not result_rows:
        os.remove(filename)

    db.close()
    return result_rows
def export_file(filename, tablename, _db=None, _session=None):
    """Export all datamap grid rows into a gzipped points file.

    :param filename: Path of the gzip output file.
    :param tablename: Name of the datamap table to read from.
    :param _db: Testing hook, pre-configured database wrapper.
    :param _session: Testing hook, pre-configured database session.
    :returns: Number of output lines written; the (empty) output file is
        removed and 0 returned when no rows matched.
    """
    # this is executed in a worker process
    # Keyset pagination: each page selects rows with `grid` greater than
    # the last grid seen, ordered by `grid`, avoiding OFFSET rescans.
    # `num` is the row's age in (roughly) months, used to thin out points.
    stmt = text('''\
SELECT
`grid`, CAST(ROUND(DATEDIFF(CURDATE(), `modified`) / 30) AS UNSIGNED) as `num`
FROM {tablename}
WHERE `grid` > :grid
ORDER BY `grid`
LIMIT :limit
'''.format(tablename=tablename).replace('\n', ' '))

    db = configure_db('ro', transport='sync', _db=_db)
    # empty bytes sorts before every real grid key, so the first page
    # starts at the beginning of the table
    min_grid = b''
    limit = 200000
    result_rows = 0
    with util.gzip_open(filename, 'w', compresslevel=2) as fd:
        with db_worker_session(db, commit=False) as session:
            if _session is not None:
                # testing hook
                session = _session
            while True:
                result = session.execute(
                    stmt.bindparams(limit=limit, grid=min_grid))
                rows = result.fetchall()
                result.close()
                if not rows:
                    break
                lines = []
                # hoist the bound method out of the loop
                extend = lines.extend
                for row in rows:
                    lat, lon = decode_datamap_grid(row.grid)
                    extend(random_points(lat, lon, row.num))
                fd.writelines(lines)
                result_rows += len(lines)
                # advance the keyset cursor past the last row seen
                min_grid = rows[-1].grid

    if not result_rows:
        os.remove(filename)

    db.close()
    return result_rows
def export_file(db_url, filename, tablename, _db_rw=None, _session=None):
    """Export all datamap grid rows into a gzipped points file.

    :param db_url: Database connection URL.
    :param filename: Path of the gzip output file.
    :param tablename: Name of the datamap table to read from.
    :param _db_rw: Testing hook, pre-configured database wrapper.
    :param _session: Testing hook, pre-configured database session.
    :returns: Number of output lines written; the (empty) output file is
        removed and 0 returned when no rows matched.
    """
    # this is executed in a worker process
    # Keyset pagination on `grid` instead of the previous LIMIT/OFFSET:
    # without an ORDER BY the row order of an OFFSET scan is undefined,
    # so pages could repeat or miss rows, and OFFSET also rescans all
    # prior rows on every page.
    stmt = text('''\
SELECT
`grid`, CAST(ROUND(DATEDIFF(CURDATE(), `modified`) / 30) AS UNSIGNED) as `num`
FROM {tablename}
WHERE `grid` > :grid
ORDER BY `grid`
LIMIT :limit
'''.format(tablename=tablename).replace('\n', ' '))

    db = configure_db(db_url, _db=_db_rw)
    min_grid = b''
    limit = 200000
    result_rows = 0
    with util.gzip_open(filename, 'w', compresslevel=2) as fd:
        with db_worker_session(db, commit=False) as session:
            if _session is not None:
                # testing hook
                session = _session
            while True:
                result = session.execute(
                    stmt.bindparams(limit=limit, grid=min_grid))
                rows = result.fetchall()
                result.close()
                if not rows:
                    break
                lines = []
                extend = lines.extend
                for row in rows:
                    lat, lon = decode_datamap_grid(row.grid)
                    extend(random_points(lat, lon, row.num))
                fd.writelines(lines)
                result_rows += len(lines)
                # advance the keyset cursor past the last row seen
                min_grid = rows[-1].grid

    if not result_rows:
        os.remove(filename)

    db.engine.pool.dispose()
    return result_rows
def test_null(self):
    """The zero/zero grid cell still produces a plain list of points."""
    origin_points = random_points(0, 0, 20)
    self.assertEqual(list, type(origin_points))
    self.assertEqual(2, len(origin_points))
def test_large(self):
    """Extreme scaled lat/lon values must not raise and must yield lists.

    The original test only checked for the absence of exceptions; it
    would pass even if random_points returned the wrong type.
    """
    for lat, lon in ((90000, 180000), (-90000, -180000)):
        points = random_points(lat, lon, 1)
        assert type(points) is list
def test_null(self):
    """The zero/zero grid cell with num=11 yields exactly one point."""
    origin_points = random_points(0, 0, 11)
    assert list is type(origin_points)
    assert 1 == len(origin_points)