Example #1
    def _ts_parse(self,
                  reqs,
                  expected_layers,
                  expected_tables,
                  expected_funcs,
                  extra_cases=None):
        cases = [] if not extra_cases else list(extra_cases)
        cases.append(Case('my_id', 'my_query;', reqs=reqs))
        ts = Tileset(parsed_data(cases))
        self.assertEqual(ts.attribution, 'test_attribution')
        self.assertEqual(ts.bounds, 'test_bounds')
        self.assertEqual(ts.center, 'test_center')
        self.assertEqual(
            ts.defaults,
            dict(srs='test_srs', datasource=dict(srid='test_datasource')))
        self.assertEqual(ts.id, 'id1')
        self.assertEqual(ts.maxzoom, 'test_maxzoom')
        self.assertEqual(ts.minzoom, 'test_minzoom')
        self.assertEqual(ts.name, 'test_name')
        self.assertEqual(ts.pixel_scale, 'test_pixel_scale')
        self.assertEqual(ts.version, 'test_version')

        self.assertEqual(len(ts.layers), len(cases))
        layer = ts.layers_by_id['my_id']
        self.assertEqual(layer.id, 'my_id')
        self.assertEqual(layer.requires_layers, expected_layers)
        self.assertEqual(layer.requires_tables, expected_tables)
        self.assertEqual(layer.requires_functions, expected_funcs)
        self.assertEqual(layer.buffer_size, 10)

        # This test can be deleted once we remove the deprecated property in some future version
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=DeprecationWarning)
            self.assertEqual(layer.requires, expected_layers)
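A hedged sketch of how a test might call this helper; the shape of `reqs` (a dict with layers/tables/functions keys) is inferred from the assertions above, and all values are invented:

    def test_requires_dict(self):
        # hypothetical test built on _ts_parse; layer/table/function names are illustrative
        self._ts_parse(
            reqs=dict(layers=['water'],
                      tables=['osm_water_polygon'],
                      functions=['water_class']),
            expected_layers=['water'],
            expected_tables=['osm_water_polygon'],
            expected_funcs=['water_class'])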
Example #2
def collect_sql(tileset_filename, parallel=False, nodata=False):
    """If parallel is True, returns a sql value that must be executed first,
        and a lists of sql values that can be ran in parallel.
        If parallel is False, returns a single sql string.
        nodata=True replaces all "/* DELAY_MATERIALIZED_VIEW_CREATION */"
        with the "WITH NO DATA" SQL."""
    tileset = Tileset.parse(tileset_filename)

    run_first = get_slice_language_tags(tileset.languages)
    run_last = ''  # at this point we don't have any SQL to run at the end

    parallel_sql = []
    for layer in tileset.layers:
        schemas = '\n\n'.join(
            (to_sql(v, layer, nodata) for v in layer.schemas))
        parallel_sql.append(f"""\
DO $$ BEGIN RAISE NOTICE 'Processing layer {layer.id}'; END$$;

{schemas}

DO $$ BEGIN RAISE NOTICE 'Finished layer {layer.id}'; END$$;
""")

    if parallel:
        return run_first, parallel_sql, run_last
    else:
        return run_first + '\n'.join(parallel_sql) + run_last
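A minimal driver sketch for the parallel mode above (the `execute` helper and the file name are hypothetical, not part of the source):

from concurrent.futures import ThreadPoolExecutor

def execute(sql):
    # hypothetical stand-in for sending SQL to PostgreSQL
    print(f'-- would run {len(sql)} bytes of SQL')

run_first, parallel_sql, run_last = collect_sql('tileset.yaml', parallel=True)
execute(run_first)                         # setup SQL must complete first
with ThreadPoolExecutor() as pool:
    list(pool.map(execute, parallel_sql))  # per-layer chunks are independent
execute(run_last)                          # wrap-up SQL runs last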
Example #3
    def __init__(self,
                 tileset: Union[str, Tileset],
                 postgis_ver: str,
                 zoom: Union[None, str, int],
                 x: Union[None, str, int],
                 y: Union[None, str, int],
                 layer_ids: List[str] = None,
                 exclude_layers=False,
                 key_column=False,
                 gzip: Union[int, bool] = False,
                 use_feature_id: bool = None,
                 test_geometry=False,
                 order_layers: bool = False,
                 extent=4096):
        if isinstance(tileset, str):
            self.tileset = Tileset.parse(tileset)
        else:
            self.tileset = tileset
        self.extent = extent
        self.pixel_width = self.tileset.pixel_scale
        self.pixel_height = self.tileset.pixel_scale
        self.key_column = key_column
        self.gzip = gzip
        self.test_geometry = test_geometry
        self.order_layers = order_layers
        self.set_layer_ids(layer_ids, exclude_layers)
        self.zoom = zoom
        self.x = x
        self.y = y

        # extract the actual version number
        # e.g. ...POSTGIS="2.4.8 r17696"...
        m = re.search(r'POSTGIS="([^"]+)"', postgis_ver)
        ver = m[1] if m else postgis_ver
        m = re.match(
            r'^(?P<major>\d+)\.(?P<minor>\d+)'
            r'(\.(?P<patch>\d+)(?P<suffix>[^ ]*)?)?', ver)
        if not m:
            raise ValueError(
                f"Unparseable PostGIS version string '{postgis_ver}'")
        major = int(m['major'])
        minor = int(m['minor'])
        patch = int(m['patch']) if m['patch'] else 0
        if m['suffix']:  # note: m['suffix'] is None when the patch component is absent
            patch -= 1
        self.postgis_ver = (major, minor, patch)

        if self.postgis_ver < (3, 0):
            if use_feature_id:
                raise ValueError(
                    'Feature ID is only available in PostGIS v3.0+')
            self.use_feature_id = False
            self.tile_envelope = 'TileBBox'
        else:
            self.tile_envelope = 'ST_TileEnvelope'
            self.use_feature_id = True if use_feature_id is None else use_feature_id
        self.tile_envelope_margin = False
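To make the two-stage version parse concrete, here is a standalone restatement with illustrative inputs (the helper name and sample strings are invented for demonstration):

import re

def parse_postgis_ver(postgis_ver: str) -> tuple:
    # stage 1: pull the quoted version out of a full version string, if present
    m = re.search(r'POSTGIS="([^"]+)"', postgis_ver)
    ver = m[1] if m else postgis_ver
    # stage 2: split into major/minor/patch with an optional pre-release suffix
    m = re.match(r'^(?P<major>\d+)\.(?P<minor>\d+)'
                 r'(\.(?P<patch>\d+)(?P<suffix>[^ ]*)?)?', ver)
    if not m:
        raise ValueError(f"Unparseable PostGIS version string '{postgis_ver}'")
    patch = int(m['patch']) if m['patch'] else 0
    if m['suffix']:
        patch -= 1  # pre-release builds sort below the final release
    return int(m['major']), int(m['minor']), patch

assert parse_postgis_ver('POSTGIS="2.4.8 r17696" GEOS="3.7.1"') == (2, 4, 8)
assert parse_postgis_ver('3.0') == (3, 0, 0)
assert parse_postgis_ver('3.1.0alpha2') == (3, 1, -1)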
Example #4
    async def generate(self, tileset, reset, auto_minmax,
                       pghost, pgport, dbname, user, password):
        ts = Tileset.parse(tileset)
        print(
            f'Connecting to PostgreSQL at {pghost}:{pgport}, db={dbname}, user={user}...')
        try:
            async with asyncpg.create_pool(
                database=dbname, host=pghost, port=pgport, user=user,
                password=password, min_size=1, max_size=1,
            ) as pool:
                async with pool.acquire() as conn:
                    mvt = MvtGenerator(
                        ts,
                        postgis_ver=await get_postgis_version(conn),
                        zoom='$1', x='$2', y='$3',
                    )
                    json_data = dict(vector_layers=await get_vector_layers(conn, mvt))
        except ConnectionError as err:
            print(f"Unable to connect to Postgres database: {err}")
            raise err

        # Convert tileset to the metadata object according to mbtiles 1.3 spec
        # https://github.com/mapbox/mbtiles-spec/blob/master/1.3/spec.md#content
        metadata = dict(
            # MUST
            name=os.environ.get('METADATA_NAME', ts.name),
            format="pbf",
            json=json.dumps(json_data, ensure_ascii=False, separators=(',', ':')),
            # SHOULD
            bounds=",".join((str(v) for v in ts.bounds)),
            center=",".join((str(v) for v in ts.center)),
            minzoom=os.environ.get('MIN_ZOOM', str(ts.minzoom)),
            maxzoom=os.environ.get('MAX_ZOOM', str(ts.maxzoom)),
            # MAY
            attribution=os.environ.get('METADATA_ATTRIBUTION', ts.attribution),
            description=os.environ.get('METADATA_DESCRIPTION', ts.description),
            version=os.environ.get('METADATA_VERSION', ts.version),
            # EXTRAS
            filesize=os.path.getsize(self.mbtiles),
        )

        bbox_str = os.environ.get('BBOX')
        if bbox_str:
            bbox = Bbox(bbox=bbox_str,
                        center_zoom=os.environ.get('CENTER_ZOOM', ts.center[2]))
            metadata["bounds"] = bbox.bounds_str()
            metadata["center"] = bbox.center_str()

        with sqlite3.connect(self.mbtiles) as conn:
            cursor = conn.cursor()
            if auto_minmax:
                metadata["minzoom"], metadata["maxzoom"] = get_minmax(cursor)
            update_metadata(cursor, metadata, reset)

        print("The metadata now contains these values:")
        self.print_all()
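The METADATA_* and BBOX environment variables let a deployment override values taken from the tileset; a hypothetical invocation (`meta` stands for an instance of the surrounding class, and all values are illustrative):

import asyncio
import os

os.environ['METADATA_NAME'] = 'My Custom Basemap'  # overrides ts.name
os.environ['BBOX'] = '-180,-85.0511,180,85.0511'   # replaces bounds/center
asyncio.run(meta.generate('tileset.yaml', reset=False, auto_minmax=True,
                          pghost='localhost', pgport=5432,
                          dbname='openmaptiles', user='openmaptiles',
                          password='openmaptiles'))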
Example #5
    async def generate(self, tileset, reset, auto_minmax, pghost, pgport,
                       dbname, user, password):
        ts = Tileset.parse(tileset)
        print(
            f'Connecting to PostgreSQL at {pghost}:{pgport}, db={dbname}, user={user}...'
        )
        try:
            async with asyncpg.create_pool(
                    database=dbname,
                    host=pghost,
                    port=pgport,
                    user=user,
                    password=password,
                    min_size=1,
                    max_size=1,
            ) as pool:
                async with pool.acquire() as conn:
                    mvt = MvtGenerator(
                        ts,
                        postgis_ver=await get_postgis_version(conn),
                        zoom='$1',
                        x='$2',
                        y='$3',
                    )
                    json_data = dict(
                        vector_layers=await get_vector_layers(conn, mvt))
        except ConnectionError as err:
            print(f'Unable to connect to Postgres database: {err}')
            raise err

        # Convert tileset to the metadata object according to mbtiles 1.3 spec
        # https://github.com/mapbox/mbtiles-spec/blob/master/1.3/spec.md#content
        metadata = dict(
            # MUST
            name=ts.name,
            format='pbf',
            json=json.dumps(json_data,
                            ensure_ascii=False,
                            separators=(',', ':')),
            # SHOULD
            bounds=','.join((str(v) for v in ts.bounds)),
            center=','.join((str(v) for v in ts.center)),
            minzoom=str(ts.minzoom),
            maxzoom=str(ts.maxzoom),
            # MAY
            attribution=ts.attribution,
            description=ts.description,
            version=ts.version,
            # EXTRAS
            id=ts.id,
        )

        self._update_metadata(metadata, auto_minmax, reset, self.mbtiles,
                              ts.center[2])
Example #6
    def __init__(self, tileset: str, tests: List[str], test_all, layers: List[str],
                 zooms: List[int], dbname: str, pghost, pgport: str, user: str,
                 password: str, summary: bool, per_layer: bool, buckets: int,
                 save_to: Union[None, str, Path], compare_with: Union[None, str, Path],
                 key_column: bool, gzip: bool, disable_feature_ids: bool = None,
                 exclude_layers: bool = False, verbose: bool = None):
        self.tileset = Tileset.parse(tileset)
        self.dbname = dbname
        self.pghost = pghost
        self.pgport = pgport
        self.user = user
        self.password = password
        self.summary = summary
        self.buckets = buckets
        self.key_column = key_column
        self.gzip = gzip
        self.disable_feature_ids = disable_feature_ids
        self.verbose = verbose
        self.per_layer = per_layer
        self.save_to = Path(save_to) if save_to else None
        self.results = PerfRoot()

        if compare_with:
            path = Path(compare_with).resolve()
            with path.open('r', encoding='utf-8') as fp:
                self.old_run: PerfRoot = PerfRoot.from_dict(json.load(fp))
            since = round_td(dt.utcnow() - dt.fromisoformat(self.old_run.created))
            print(f"Comparing results with a previous run created {since} ago: {path}")
        else:
            self.old_run = None

        for test in tests:
            if test not in TEST_CASES:
                cases = '\n'.join(map(TestCase.fmt_table, TEST_CASES.values()))
                raise DocoptExit(f"Test '{test}' is not defined. "
                                 f"Available tests are:\n{cases}\n")
        if test_all:
            # Do this after validating the individual tests: with test_all
            # they are ignored, but were still validated above
            tests = [v for v in TEST_CASES.keys() if v != 'null']
        all_layers = [l["layer"]['id'] for l in self.tileset.layers]
        if layers and exclude_layers:
            # inverse layers list
            layers = [l for l in all_layers if l not in layers]
        elif not layers and per_layer:
            layers = all_layers
        # Keep the order, but ensure no duplicates
        self.layers = list(dict.fromkeys(layers))
        self.tests = list(dict.fromkeys(tests))
        self.zooms = list(dict.fromkeys(zooms))
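The `dict.fromkeys` idiom used above deduplicates while preserving first-seen order:

# e.g. (values invented):
assert list(dict.fromkeys(['water', 'place', 'water'])) == ['water', 'place']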
Example #7
    def _ts_overrides(layer: Optional[dict] = None,
                      override_ts: Optional[dict] = None,
                      override_layer: Optional[dict] = None,
                      env: Optional[dict] = None):
        data = parsed_data([Case('my_id', 'my_query;')])

        ts_data = data.data['tileset']
        if override_ts is not None:
            ts_data['overrides'] = override_ts
        if layer is not None:
            ts_data['layers'][0]['file'].data['layer'].update(layer)
        if override_layer is not None:
            ts_data['layers'][0].update(override_layer)

        return Tileset(data, getenv=env.get if env else None)
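A hedged sketch of how a test might drive this fixture; the calls match the signature above, but all keys and values are invented:

        # hypothetical calls showing the three override channels
        ts = self._ts_overrides(layer=dict(description='updated'))      # layer data edit
        ts = self._ts_overrides(override_ts=dict(attribution='(c) X'))  # tileset overrides
        ts = self._ts_overrides(env={'SOME_VAR': '1'})                  # getenv lookups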
Example #8
    def __init__(self,
                 tileset,
                 layer_ids=None,
                 key_column=False,
                 use_feature_id=True):
        if isinstance(tileset, str):
            self.tileset = Tileset.parse(tileset)
        else:
            self.tileset = tileset
        self.extent = 4096
        self.pixel_width = PIXEL_SCALE
        self.pixel_height = PIXEL_SCALE
        self.layers_ids = set(layer_ids or [])
        self.key_column = key_column
        self.use_feature_id = use_feature_id
Example #9
    def __init__(self,
                 tileset: Union[str, Tileset],
                 postgis_ver: str,
                 zoom: Union[str, int],
                 x: Union[str, int],
                 y: Union[str, int],
                 layer_ids: List[str] = None,
                 exclude_layers=False,
                 key_column=False,
                 gzip: Union[int, bool] = False,
                 use_feature_id: bool = None,
                 test_geometry=False,
                 extent=4096):
        if isinstance(tileset, str):
            self.tileset = Tileset.parse(tileset)
        else:
            self.tileset = tileset
        self.extent = extent
        self.pixel_width = PIXEL_SCALE
        self.pixel_height = PIXEL_SCALE
        self.key_column = key_column
        self.gzip = gzip
        self.test_geometry = test_geometry
        self.set_layer_ids(layer_ids, exclude_layers)
        self.zoom = zoom
        self.x = x
        self.y = y

        m = re.match(r'^(?P<major>\d+)\.(?P<minor>\d+)(\.(?P<patch>\d+))?',
                     postgis_ver)
        if not m:
            raise ValueError(
                f"Unparseable PostGIS version string '{postgis_ver}'")
        # patch may be None when absent; safe here because only the
        # major/minor parts are compared against (3, 0) below
        self.postgis_ver = (int(m['major']), int(m['minor']),
                            int(m['patch']) if m['patch'] else None)

        if self.postgis_ver < (3, 0):
            if use_feature_id:
                raise ValueError(
                    f"Feature ID is only available in PostGIS v3.0+")
            self.use_feature_id = False
            self.tile_envelope = 'TileBBox'
        else:
            self.tile_envelope = 'ST_TileEnvelope'
            self.use_feature_id = True if use_feature_id is None else use_feature_id
        self.tile_envelope_margin = False
Example #10
    def __init__(self, url, port, pghost, pgport, dbname, user, password,
                 layers, tileset_path, sql_file, key_column, disable_feature_ids,
                 gzip, verbose, exclude_layers, test_geometry):
        self.url = url
        self.port = port
        self.pghost = pghost
        self.pgport = pgport
        self.dbname = dbname
        self.user = user
        self.password = password
        self.tileset_path = tileset_path
        self.sql_file = sql_file
        self.layer_ids = layers
        self.exclude_layers = exclude_layers
        self.key_column = key_column
        self.gzip = gzip
        self.disable_feature_ids = disable_feature_ids
        self.test_geometry = test_geometry
        self.verbose = verbose

        self.tileset = Tileset.parse(self.tileset_path)
Example #11
    def test_var_substitution(self):
        variables = dict(
            vars=dict(var_substitution_1=14, var_substitution_2='az'))
        data = parsed_data(Case('my_id', ''))
        data.data['tileset']['layers'][0]['file'].data['layer'].update(
            variables)
        ts = Tileset(data)
        layer = ts.layers_by_id['my_id']

        self.assertEqual(
            to_sql(
                'SELECT * from test where zoom > %%VAR:var_substitution_1%%',
                layer, False), 'SELECT * from test where zoom > 14')
        self.assertEqual(
            to_sql(
                "SELECT * from test where zoom > '%%VAR:var_substitution_2%%'",
                layer, False), "SELECT * from test where zoom > 'az'")
        self.assertRaises(
            ValueError, to_sql,
            'SELECT * from test where zoom > %%VAR:var_substitution_3%%',
            layer, False)
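The %%VAR:name%% behavior exercised above can be restated as a standalone sketch (the regex and error message are illustrative, not the actual to_sql implementation):

import re

def substitute_vars(sql: str, variables: dict) -> str:
    """Replace %%VAR:name%% tokens; raise if a variable is undefined."""
    def repl(m):
        name = m.group(1)
        if name not in variables:
            raise ValueError(f'Variable {name} is not defined')
        return str(variables[name])
    return re.sub(r'%%VAR:([\w-]+)%%', repl, sql)

assert substitute_vars('zoom > %%VAR:v1%%', {'v1': 14}) == 'zoom > 14'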
Example #12
    def __init__(self, url, port, pghost, pgport, dbname, user, password,
                 layers, tileset_path, sql_file, key_column, disable_feature_ids,
                 gzip, verbose, exclude_layers, test_geometry):
        self.url = url
        self.port = port
        self.pghost = pghost
        self.pgport = pgport
        self.dbname = dbname
        self.user = user
        self.password = password
        self.tileset_path = tileset_path
        self.sql_file = sql_file
        self.layer_ids = layers
        self.exclude_layers = exclude_layers
        self.key_column = key_column
        self.gzip = gzip
        self.disable_feature_ids = disable_feature_ids
        self.test_geometry = test_geometry
        self.verbose = verbose

        self.tileset = Tileset.parse(self.tileset_path)

        self.metadata: Dict[str, Any] = dict(
            format="pbf",
            name=self.tileset.name,
            id=self.tileset.id,
            bounds=self.tileset.bounds,
            center=self.tileset.center,
            maxzoom=self.tileset.maxzoom,
            minzoom=self.tileset.minzoom,
            version=self.tileset.version,
            attribution=self.tileset.attribution,
            description=self.tileset.description,
            pixel_scale=self.tileset.pixel_scale,
            tilejson="2.0.0",
            tiles=[f"{self.url}" + "/tiles/{z}/{x}/{y}.pbf"],
            vector_layers=[],
        )
Example #13
def collect_sql(tileset_filename, parallel=False, nodata=False
                ) -> Union[str, Tuple[str, Dict[str, str], str]]:
    """If parallel is True, returns a sql value that must be executed first, last,
        and a dict of names -> sql code that can be ran in parallel.
        If parallel is False, returns a single sql string.
        nodata=True replaces all '/* DELAY_MATERIALIZED_VIEW_CREATION */'
        with the "WITH NO DATA" SQL."""
    tileset = Tileset.parse(tileset_filename)

    run_first = '-- This SQL code should be executed first\n\n' + \
                get_slice_language_tags(tileset)
    # at this point we don't have any SQL to run at the end
    run_last = '-- This SQL code should be executed last\n'

    # resolved is a map of layer ID to some ID in results.
    # the ID in results could be the same as layer ID, or it could be a tuple of IDs
    resolved = {}
    # results is an ID -> SQL content map
    results = {}
    unresolved = tileset.layers_by_id.copy()
    last_count = -1
    # safety to prevent infinite loop, even though it is also checked in tileset
    while len(resolved) > last_count:
        last_count = len(resolved)
        for lid, layer in list(unresolved.items()):
            if all((v in resolved for v in layer.requires_layers)):
                # All requirements have been resolved.
                resolved[lid] = lid
                results[lid] = layer_to_sql(layer, nodata)
                del unresolved[lid]

                if layer.requires_layers:
                    # If there are multiple requirements, merge them first,
                    # e.g. given layers A, B, and C, where C requires A & B,
                    # first concatenate A and B, then append C to the result.
                    # Make sure the same code is not merged multiple times.
                    mix = list(layer.requires_layers) + [lid]
                    lid1 = mix[0]
                    for idx in range(1, len(mix)):
                        lid2 = mix[idx]
                        res_id1 = resolved[lid1]
                        res_id2 = resolved[lid2]
                        if res_id1 == res_id2:
                            continue
                        merged_id = res_id1 + '__' + res_id2
                        if merged_id in results:
                            raise ValueError(f'Naming collision - {merged_id} exists')
                        # NOTE: merging will move entity to the end of the list
                        results[merged_id] = results[res_id1] + '\n' + results[res_id2]
                        del results[res_id1]
                        del results[res_id2]
                        # Update resolved IDs to point to the merged result
                        for k, v in resolved.items():
                            if v == res_id1 or v == res_id2:
                                resolved[k] = merged_id
    if unresolved:
        raise ValueError('Circular dependency found in layer requirements: '
                         + ', '.join(unresolved.keys()))

    if not parallel:
        sql = '\n'.join(results.values())
        return f'{run_first}\n{sql}\n{run_last}'
    else:
        return run_first, results, run_last
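A worked trace of the merge loop above, with invented layer IDs:

# Layers A, B, C, D, where C requires [A, B] and D is independent:
#   A, B, D resolve on their own and get individual entries in results.
#   Once A and B are resolved, C resolves and mix = [A, B, C]:
#     A + B    -> results['A__B']     (SQL concatenated, old entries removed)
#     A__B + C -> results['A__B__C']
#   Final: {'D': ..., 'A__B__C': ...} -- D still runs in parallel with the
#   merged chunk, whose internal ordering satisfies the requirements.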
Example #14
    def __init__(self,
                 tileset: str,
                 tests: List[str],
                 test_all,
                 layers: List[str],
                 zooms: List[int],
                 dbname: str,
                 pghost,
                 pgport: str,
                 user: str,
                 password: str,
                 summary: bool,
                 per_layer: bool,
                 buckets: int,
                 save_to: Union[None, str, Path],
                 compare_with: Union[None, str, Path],
                 key_column: bool,
                 disable_colors: bool = None,
                 disable_feature_ids: bool = None,
                 verbose: bool = None):
        if disable_colors is not None:
            set_color_mode(not disable_colors)
        self.tileset = Tileset.parse(tileset)
        self.dbname = dbname
        self.pghost = pghost
        self.pgport = pgport
        self.user = user
        self.password = password
        self.summary = summary
        self.buckets = buckets
        self.key_column = key_column
        self.disable_feature_ids = disable_feature_ids
        self.verbose = verbose
        self.per_layer = per_layer
        self.save_to = Path(save_to) if save_to else None
        self.results = PerfRoot()

        if compare_with:
            path = Path(compare_with).resolve()
            with path.open('r', encoding='utf-8') as fp:
                self.old_run: PerfRoot = PerfRoot.from_dict(json.load(fp))
            since = round_td(dt.utcnow() -
                             dt.fromisoformat(self.old_run.created))
            print(
                f"Comparing results with a previous run created {since} ago: {path}"
            )
        else:
            self.old_run = None

        for test in tests:
            if test not in TEST_CASES:
                cases = '\n'.join(map(TestCase.fmt_table, TEST_CASES.values()))
                raise DocoptExit(f"Test '{test}' is not defined. "
                                 f"Available tests are:\n{cases}\n")
        if test_all:
            # Do this after validating the individual tests: with test_all
            # they are ignored, but were still validated above
            tests = [v for v in TEST_CASES.keys() if v != 'null']
        if per_layer and not layers:
            layers = [l["layer"]['id'] for l in self.tileset.layers]
        # Keep the order, but ensure no duplicates
        layers = list(dict.fromkeys(layers))
        tests = list(dict.fromkeys(tests))
        zooms = list(dict.fromkeys(zooms))
        self.tests = []
        old_tests = self.old_run.tests if self.old_run else None
        for layer in (layers if per_layer else [None]):
            for test in tests:
                for z in zooms:
                    tc = self.create_testcase(test, z, layer or layers)
                    if old_tests:
                        tc.old_result = next(
                            (v for v in old_tests
                             if v.id == tc.id and v.layers == tc.layers_id
                             and v.zoom == tc.zoom), None)
                    self.tests.append(tc)
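For scale, the triple loop builds one test case per (layer, test, zoom) combination; with invented inputs:

# per_layer=True, layers=['water'], tests=['us-above'], zooms=[0, 7, 14]
# -> 1 * 1 * 3 = 3 test cases, each matched to the previous run
#    by (id, layers, zoom) when a comparison file was loaded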
Example #15
def serve(port, pghost, pgport, dbname, user, password, metadata, tileset_path,
          sql_file, mask_layer, mask_zoom, verbose):
    fname = 'getTile'
    tileset = Tileset.parse(tileset_path)

    if sql_file:
        with open(sql_file) as stream:
            prepared_sql = stream.read()
        print(f'Loaded {sql_file}')
    else:
        prepared_sql = generate_sqltomvt_preparer({
            'tileset': tileset,
            'fname': fname,
            'mask-layer': mask_layer,
            'mask-zoom': mask_zoom,
        })

    print(
        f'Connecting to PostgreSQL at {pghost}:{pgport}, db={dbname}, user={user}...'
    )
    connection = psycopg2.connect(
        dbname=dbname,
        host=pghost,
        port=pgport,
        user=user,
        password=password,
    )
    cursor = connection.cursor()

    # Get all Postgres types and keep those we know about (could be optimized further)
    known_types = dict(bool="Boolean",
                       text="String",
                       int4="Number",
                       int8="Number")
    cursor.execute("select oid, typname from pg_type")
    pg_types = {
        row[0]: known_types[row[1]]
        for row in cursor.fetchall() if row[1] in known_types
    }

    vector_layers = []
    for layer_def in tileset.layers:
        layer = layer_def["layer"]

        # Get field names and types by executing a dummy query
        query = layer['datasource']['query'].format(
            name_languages=languages_to_sql(
                tileset.definition.get('languages', [])))
        query = query.replace("!bbox!", "TileBBox(0, 0, 0)")
        query = query.replace("z(!scale_denominator!)", "0")
        query = query.replace("!pixel_width!", str(PIXEL_SCALE))
        query = query.replace("!pixel_height!", str(PIXEL_SCALE))
        cursor.execute(f"SELECT * FROM {query} WHERE false LIMIT 0")
        fields = {
            fld.name: pg_types[fld.type_code]
            for fld in cursor.description if fld.type_code in pg_types
        }

        vector_layers.append(
            dict(
                id=layer["id"],
                fields=fields,
                maxzoom=metadata["maxzoom"],
                minzoom=metadata["minzoom"],
                description=layer["description"],
            ))

    metadata["vector_layers"] = vector_layers
    metadata["tiles"] = [f"http://localhost:{port}" + "/tiles/{z}/{x}/{y}.pbf"]

    if verbose:
        print(
            f'Using prepared SQL:\n\n-------\n\n{prepared_sql}\n\n-------\n\n')

    try:
        cursor.execute(prepared_sql)
    finally:
        cursor.close()

    query = f"EXECUTE {fname}(%s, %s, %s)"
    print(f'Will use "{query}" to get vector tiles.')

    tornado.log.access_log.setLevel(logging.INFO if verbose else logging.ERROR)

    application = tornado.web.Application([
        (r"/", GetMetadata, dict(metadata=metadata)),
        (r"/tiles/([0-9]+)/([0-9]+)/([0-9]+).pbf", GetTile,
         dict(fname=fname, connection=connection, query=query)),
    ])

    application.listen(port)
    print(f"Postserve started, listening on 0.0.0.0:{port}")
    print(f"Use http://localhost:{port} as the data source")

    tornado.ioloop.IOLoop.instance().start()
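Once the server is listening, both registered routes can be smoke-tested from Python (the port value is illustrative):

from urllib.request import urlopen

meta = urlopen('http://localhost:8090/').read()                 # TileJSON metadata
tile = urlopen('http://localhost:8090/tiles/0/0/0.pbf').read()  # one vector tile
print(len(meta), len(tile))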