Example #1
def proxy(subpath):
    app.logger.info(
        f"{request.method} GeoServer proxy, user={g.user}, subpath={subpath}, url={request.url}, request.query_string={request.query_string.decode('UTF-8')}"
    )

    url = settings.LAYMAN_GS_URL + subpath + '?' + request.query_string.decode(
        'UTF-8')
    headers_req = {
        key.lower(): value
        for (key, value) in request.headers if key.lower() not in
        ['host',
         settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE.lower()]
    }
    data = request.get_data()
    authn_username = authn.get_authn_username()
    if is_user_with_name(authn_username):
        headers_req[
            settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE] = authn_username

    app.logger.info(
        f"{request.method} GeoServer proxy, headers_req={headers_req}, url={url}"
    )
    wfs_t_layers = set()
    if data is not None and len(data) > 0:
        try:
            wfs_t_attribs, wfs_t_layers = extract_attributes_and_layers_from_wfs_t(
                data)
            if wfs_t_attribs:
                ensure_wfs_t_attributes(wfs_t_attribs)
        except BaseException as err:
            app.logger.warning(
                f"WFS Proxy: error={err}, trace={traceback.format_exc()}")
    response = requests.request(method=request.method,
                                url=url,
                                data=data,
                                headers=headers_req,
                                cookies=request.cookies,
                                allow_redirects=False)

    if response.status_code == 200:
        for workspace, layername in wfs_t_layers:
            if authz.can_i_edit(LAYER_TYPE, workspace, layername):
                patch_after_feature_change(workspace, layername)

    excluded_headers = [
        'content-encoding', 'content-length', 'transfer-encoding', 'connection'
    ]
    headers = {
        key: value
        for (key, value) in response.headers.items()
        if key.lower() not in excluded_headers
    }

    final_response = Response(response.content, response.status_code, headers)
    return final_response
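
The interesting part of this proxy is the header handling: hop-specific headers are stripped and the authentication header is injected only for named users, so a client cannot spoof it. Below is a minimal, self-contained sketch of that step; the header name is a placeholder standing in for settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE and is an assumption, not the project's actual configuration.

# Hypothetical header name; in Layman it comes from settings.
AUTHN_HEADER = 'sec-username'

def build_proxy_headers(incoming_headers, authn_username=None):
    # Drop the Host header and any incoming copy of the auth header,
    # so the client cannot impersonate another user.
    headers = {
        key.lower(): value
        for key, value in incoming_headers.items()
        if key.lower() not in ('host', AUTHN_HEADER)
    }
    # Inject the authenticated username only for named users.
    if authn_username:
        headers[AUTHN_HEADER] = authn_username
    return headers

# build_proxy_headers({'Host': 'layman', 'Accept': '*/*', 'Sec-Username': 'spoof'}, 'alice')
# -> {'accept': '*/*', 'sec-username': 'alice'}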
Example #2
def setup_post_access_rights(request_form, kwargs, actor_name):
    kwargs['access_rights'] = dict()
    for type in ['read', 'write']:
        if not request_form.get('access_rights.' + type):
            if is_user_with_name(actor_name):
                access_rights = [actor_name]
            else:
                access_rights = [settings.RIGHTS_EVERYONE_ROLE]
        else:
            access_rights = list({
                x.strip()
                for x in request_form['access_rights.' + type].split(',')
            })
        kwargs['access_rights'][type] = access_rights
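
Because setup_post_access_rights fills kwargs in place, it can be tried with a plain dict standing in for the request form. The role and user names below are illustrative assumptions (in Layman the everyone role comes from settings.RIGHTS_EVERYONE_ROLE).

# Illustrative call; 'EVERYONE', 'alice' and 'bob' are made-up values.
kwargs = {}
form = {'access_rights.read': 'EVERYONE', 'access_rights.write': 'alice, bob'}
setup_post_access_rights(form, kwargs, actor_name='alice')
# kwargs -> {'access_rights': {'read': ['EVERYONE'],
#                              'write': ['alice', 'bob']}}
# (the 'write' list comes from a set, so its order is not guaranteed)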
Example #3
def reserve_username(username, adjust=False):
    current_username = authn.get_authn_username()
    if is_user_with_name(current_username):
        raise LaymanError(34, {'username': current_username})
    if adjust is not True:
        check_workspace_name(username)
        workspaces = get_workspaces()
        if username in workspaces:
            raise LaymanError(35)
        try:
            ensure_whole_user(username)
            claims = get_open_id_claims()
            _save_reservation(username, claims)
        except LaymanError as exc:
            delete_whole_user(username)
            raise exc
        return
    claims = get_open_id_claims()
    suggestions = [username] + get_username_suggestions_from_claims(claims)
    suggestions = [
        slugify(s) for s in suggestions if s is not None and len(s) > 0
    ]
    suggestions = to_safe_names(suggestions, 'user')
    workspaces = get_workspaces()
    username = None
    idx = 0
    while True:
        for suggestion in suggestions:
            if idx > 0:
                suggestion = f"{suggestion}{idx}"
            try:
                check_workspace_name(suggestion)
            except LaymanError as exc:
                if not (exc.code == 2 or exc.code == 35):
                    raise exc
            if suggestion in workspaces:
                continue
            try:
                ensure_whole_user(suggestion)
                username = suggestion
                _save_reservation(username, claims)
                break
            except LaymanError:
                delete_whole_user(suggestion)
        if username is not None:
            break
        idx += 1
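
The retry loop enumerates candidates as suggestion, suggestion1, suggestion2, ... until one is free. A stripped-down sketch of just that naming scheme, without the GeoServer/Redis side effects or LaymanError handling, might look like this:

def first_free_name(suggestions, taken):
    """Return the first candidate of the form <suggestion><idx> not in `taken`.

    Simplified sketch of the reservation loop above: no side effects,
    no error handling, purely the suffix enumeration.
    """
    idx = 0
    while True:
        for suggestion in suggestions:
            candidate = f"{suggestion}{idx}" if idx > 0 else suggestion
            if candidate not in taken:
                return candidate
        idx += 1

# first_free_name(['anna'], {'anna', 'anna1'}) -> 'anna2'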
Example #4
def get_publication_infos_with_metainfo(
    workspace_name=None,
    pub_type=None,
    style_type=None,
    reader=None,
    writer=None,
    limit=None,
    offset=None,
    full_text_filter=None,
    bbox_filter=None,
    order_by_list=None,
    ordering_full_text=None,
    ordering_bbox=None,
):
    order_by_list = order_by_list or []

    full_text_tsquery = db_util.to_tsquery_string(
        full_text_filter) if full_text_filter else None
    full_text_like = '%' + full_text_filter + '%' if full_text_filter else None
    ordering_full_text_tsquery = db_util.to_tsquery_string(
        ordering_full_text) if ordering_full_text else None

    where_params_def = [
        (workspace_name, 'w.name = %s', (workspace_name, )),
        (pub_type, 'p.type = %s', (pub_type, )),
        (style_type, 'p.style_type::text = %s', (style_type, )),
        (reader
         and not is_user_with_name(reader), 'p.everyone_can_read = TRUE',
         tuple()),
        (is_user_with_name(reader), f"""(p.everyone_can_read = TRUE
                        or (u.id is not null and w.name = %s)
                        or EXISTS(select 1
                                  from {DB_SCHEMA}.rights r inner join
                                       {DB_SCHEMA}.users u2 on r.id_user = u2.id inner join
                                       {DB_SCHEMA}.workspaces w2 on w2.id = u2.id_workspace
                                  where r.id_publication = p.id
                                    and r.type = 'read'
                                    and w2.name = %s))""", (
            reader,
            reader,
        )),
        (writer
         and not is_user_with_name(writer), 'p.everyone_can_write = TRUE',
         tuple()),
        (is_user_with_name(writer), f"""(p.everyone_can_write = TRUE
                        or (u.id is not null and w.name = %s)
                        or EXISTS(select 1
                                  from {DB_SCHEMA}.rights r inner join
                                       {DB_SCHEMA}.users u2 on r.id_user = u2.id inner join
                                       {DB_SCHEMA}.workspaces w2 on w2.id = u2.id_workspace
                                  where r.id_publication = p.id
                                    and r.type = 'write'
                                    and w2.name = %s))""", (
            writer,
            writer,
        )),
        (full_text_filter,
         '(_prime_schema.my_unaccent(p.title) @@ to_tsquery(unaccent(%s))'
         'or lower(unaccent(p.title)) like lower(unaccent(%s)))', (
             full_text_tsquery,
             full_text_like,
         )),
        (bbox_filter,
         'p.bbox && ST_MakeBox2D(ST_MakePoint(%s, %s), ST_MakePoint(%s, %s))',
         bbox_filter),
    ]

    order_by_definition = {
        consts.ORDER_BY_FULL_TEXT:
        ('ts_rank_cd(_prime_schema.my_unaccent(p.title), to_tsquery(unaccent(%s))) DESC',
         (ordering_full_text_tsquery, )),
        consts.ORDER_BY_TITLE: ('lower(unaccent(p.title)) ASC', tuple()),
        consts.ORDER_BY_LAST_CHANGE: ('updated_at DESC', tuple()),
        consts.ORDER_BY_BBOX: ("""
            -- A∩B / (A + B)
            CASE
                -- if there is any intersection
                WHEN p.bbox && ST_MakeBox2D(ST_MakePoint(%s, %s),
                                            ST_MakePoint(%s, %s))
                    THEN
                        -- in cases, when area of intersection is 0, we want it rank higher than no intersection
                        GREATEST(st_area(st_intersection(p.bbox, ST_MakeBox2D(ST_MakePoint(%s, %s),
                                                                              ST_MakePoint(%s, %s)))),
                                 1)
                        -- we have to solve division by 0
                        / (GREATEST(st_area(p.bbox), 1) +
                           GREATEST(st_area(ST_MakeBox2D(ST_MakePoint(%s, %s),
                                                         ST_MakePoint(%s, %s))),
                                    1)
                           )
                -- if there is no intersection, result is 0 in all cases
                ELSE
                    0
            END DESC
            """, ordering_bbox + ordering_bbox +
                               ordering_bbox if ordering_bbox else tuple()),
    }

    assert all(ordering_item in order_by_definition.keys()
               for ordering_item in order_by_list)

    #########################################################
    # SELECT clause
    select_clause = f"""
select p.id as id_publication,
       w.name as workspace_name,
       p.type,
       p.name,
       p.title,
       p.uuid::text,
       p.style_type,
       p.updated_at,
       ST_XMIN(p.bbox) as xmin,
       ST_YMIN(p.bbox) as ymin,
       ST_XMAX(p.bbox) as xmax,
       ST_YMAX(p.bbox) as ymax,
       (select rtrim(concat(case when u.id is not null then w.name || ',' end,
                            string_agg(w2.name, ',') || ',',
                            case when p.everyone_can_read then %s || ',' end
                            ), ',')
        from {DB_SCHEMA}.rights r inner join
             {DB_SCHEMA}.users u2 on r.id_user = u2.id inner join
             {DB_SCHEMA}.workspaces w2 on w2.id = u2.id_workspace
        where r.id_publication = p.id
          and r.type = 'read') can_read_users,
       (select rtrim(concat(case when u.id is not null then w.name || ',' end,
                            string_agg(w2.name, ',') || ',',
                            case when p.everyone_can_write then %s || ',' end
                            ), ',')
        from {DB_SCHEMA}.rights r inner join
             {DB_SCHEMA}.users u2 on r.id_user = u2.id inner join
             {DB_SCHEMA}.workspaces w2 on w2.id = u2.id_workspace
        where r.id_publication = p.id
          and r.type = 'write') can_write_users,
       count(*) OVER() AS full_count
from {DB_SCHEMA}.workspaces w inner join
     {DB_SCHEMA}.publications p on p.id_workspace = w.id left join
     {DB_SCHEMA}.users u on u.id_workspace = w.id
"""
    select_params = (
        ROLE_EVERYONE,
        ROLE_EVERYONE,
    )

    #########################################################
    # WHERE clause
    where_params = tuple()
    where_parts = list()
    for (
            value,
            where_part,
            params,
    ) in where_params_def:
        if value:
            where_parts.append(where_part)
            where_params = where_params + params
    where_clause = ''
    if where_parts:
        where_clause = 'WHERE ' + '\n  AND '.join(where_parts) + '\n'

    #########################################################
    # ORDER BY clause
    order_by_params = tuple()
    order_by_parts = list()
    for order_by_part in order_by_list:
        order_by_parts.append(order_by_definition[order_by_part][0])
        order_by_params = order_by_params + order_by_definition[order_by_part][
            1]

    order_by_parts.append('w.name ASC')
    order_by_parts.append('p.name ASC')
    order_by_clause = 'ORDER BY ' + ', '.join(order_by_parts)

    #########################################################
    # Pagination clause
    pagination_params = tuple()
    pagination_clause = ''

    if limit is not None:
        assert limit >= 0
        assert isinstance(limit, int)
        pagination_clause = pagination_clause + f' LIMIT {limit} '
    if offset is not None:
        assert offset >= 0
        assert isinstance(offset, int)
        pagination_clause = pagination_clause + f' OFFSET {offset} '

    #########################################################
    # Put it together
    sql_params = select_params + where_params + order_by_params + pagination_params
    select = select_clause + where_clause + order_by_clause + pagination_clause
    values = db_util.run_query(select, sql_params)

    # print(f'get_publication_infos:\n\nselect={select}\n\nsql_params={sql_params}\n\n&&&&&&&&&&&&&&&&&')

    infos = {(
        workspace_name,
        type,
        publication_name,
    ): {
        'id': id_publication,
        'name': publication_name,
        'title': title,
        'uuid': uuid,
        'type': type,
        'style_type': style_type,
        'updated_at': updated_at,
        'bounding_box': [xmin, ymin, xmax, ymax],
        'access_rights': {
            'read': can_read_users.split(','),
            'write': can_write_users.split(',')
        }
    }
             for id_publication, workspace_name, type, publication_name, title,
             uuid, style_type, updated_at, xmin, ymin, xmax, ymax,
             can_read_users, can_write_users, _ in values}

    if values:
        total_count = values[0][-1]
    else:
        count_clause = f"""
        select count(*) AS full_count
        from {DB_SCHEMA}.workspaces w inner join
             {DB_SCHEMA}.publications p on p.id_workspace = w.id left join
             {DB_SCHEMA}.users u on u.id_workspace = w.id
        """
        sql_params = where_params
        select = count_clause + where_clause
        count = db_util.run_query(select, sql_params)
        total_count = count[0][-1]

    if infos:
        start = offset + 1 if offset else 1
        content_range = (start, start + len(infos) - 1)
    else:
        content_range = (0, 0)

    result = {
        'items': infos,
        'total_count': total_count,
        'content_range': content_range,
    }
    return result
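
The WHERE clause is assembled from (value, sql_fragment, params) triples: a fragment and its bind parameters are included only when the guarding value is truthy. A minimal standalone sketch of that assembly pattern, with illustrative names:

def build_where(where_params_def):
    """Combine (value, sql_fragment, params) triples into a WHERE clause.

    Sketch of the pattern used above: each fragment and its bind
    parameters are used only when its guarding value is truthy.
    """
    parts = []
    params = tuple()
    for value, fragment, fragment_params in where_params_def:
        if value:
            parts.append(fragment)
            params += fragment_params
    clause = ('WHERE ' + '\n  AND '.join(parts) + '\n') if parts else ''
    return clause, params

# build_where([('layer', 'p.type = %s', ('layer',)), (None, 'w.name = %s', ('ws',))])
# -> ('WHERE p.type = %s\n', ('layer',))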
Example #5
def generate_map_thumbnail(username, mapname, editor):
    map_info = get_publication_info(username,
                                    MAP_TYPE,
                                    mapname,
                                    context={'keys': ['file']})
    map_file_get_url = map_info['_file']['url']

    params = urlencode({
        'map_def_url': map_file_get_url,
        'gs_url':
        f"http://{settings.LAYMAN_SERVER_NAME}{settings.LAYMAN_GS_PATH}",
        'gs_public_url': f"{settings.LAYMAN_GS_PROXY_BASE_URL}",
        'editor': editor if is_user_with_name(editor) else '',
        'proxy_header': settings.LAYMAN_AUTHN_HTTP_HEADER_NAME,
        # 'file_name': tmp_file_name,
    })
    timgen_url = f"{settings.LAYMAN_TIMGEN_URL}?{params}"
    current_app.logger.info(f"Timgen URL: {timgen_url}")

    chrome_options = Options()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--no-sandbox")
    desired_capabilities = DesiredCapabilities.CHROME
    desired_capabilities['goog:loggingPrefs'] = {'browser': 'ALL'}
    chrome = webdriver.Chrome(
        options=chrome_options,
        desired_capabilities=desired_capabilities,
    )
    chrome.set_window_size(500, 500)

    chrome.get(timgen_url)
    entries = chrome.get_log('browser')
    max_attempts = 40
    attempts = 0
    while next((e for e in entries if e['level'] != 'INFO' or
                (e['level'] == 'INFO' and '"dataurl" "data:image/png;base64,'
                 in e['message'])), None) is None and attempts < max_attempts:
        current_app.logger.info(f"waiting for entries")
        time.sleep(0.5)
        attempts += 1
        entries = chrome.get_log('browser')
    if attempts >= max_attempts:
        current_app.logger.info(f"max attempts reach")
        return
    for entry in entries:
        current_app.logger.info(f"browser entry {entry}")

    # chrome.save_screenshot(f'/code/tmp/{username}.{mapname}.png')
    chrome.close()
    chrome.quit()

    entry = next((e for e in entries if e['level'] == 'INFO'
                  and '"dataurl" "data:image/png;base64,' in e['message']),
                 None)
    if entry is None:
        return
    match = re.match(r'.*\"dataurl\" \"data:image/png;base64,(.+)\"',
                     entry['message'])
    if not match:
        return
    groups = match.groups()
    if len(groups) < 1:
        return
    data_url = groups[0]
    # current_app.logger.info(f"data_url {data_url}")
    # current_app.logger.info(f"len(data_url) {len(data_url)}")

    ensure_map_thumbnail_dir(username, mapname)
    file_path = get_map_thumbnail_path(username, mapname)
    try:
        os.remove(file_path)
    except OSError:
        pass

    with open(file_path, 'wb') as file:
        file.write(base64.b64decode(data_url))
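
The thumbnail itself arrives as a console.log line of the form "dataurl" "data:image/png;base64,...". A self-contained sketch of the extraction and decoding step used above:

import base64
import re

def decode_thumbnail(log_message):
    """Extract and decode the base64 PNG payload from a browser log line.

    Sketch of the extraction above; returns None when the message
    does not carry a data URL.
    """
    match = re.match(r'.*"dataurl" "data:image/png;base64,(.+)"', log_message)
    if not match:
        return None
    return base64.b64decode(match.group(1))

# decode_thumbnail('console-api 1:1 "dataurl" "data:image/png;base64,iVBORw0KGgo="')
# -> b'\x89PNG\r\n\x1a\n'  (the PNG signature)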
Example #6
def proxy(subpath):
    app.logger.info(
        f"{request.method} GeoServer proxy, actor={g.user}, subpath={subpath}, url={request.url}, request.query_string={request.query_string.decode('UTF-8')}"
    )

    # adjust authentication headers
    url = settings.LAYMAN_GS_URL + subpath
    query_params_string = request.query_string.decode('UTF-8')
    headers_req = {
        key.lower(): value
        for (key, value) in request.headers if key.lower() not in
        ['host',
         settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE.lower()]
    }
    data = request.get_data()
    authn_username = authn.get_authn_username()
    if is_user_with_name(authn_username):
        headers_req[
            settings.LAYMAN_GS_AUTHN_HTTP_HEADER_ATTRIBUTE] = authn_username

    # ensure layer attributes in case of WFS-T
    app.logger.info(
        f"{request.method} GeoServer proxy, headers_req={headers_req}, url={url}"
    )
    wfs_t_layers = set()
    if data is not None and len(data) > 0:
        try:
            wfs_t_attribs, wfs_t_layers = extract_attributes_and_layers_from_wfs_t(
                data)
            if wfs_t_attribs:
                ensure_wfs_t_attributes(wfs_t_attribs)
        except BaseException as err:
            app.logger.warning(
                f"WFS Proxy: error={err}, trace={traceback.format_exc()}")

    query_params = CaseInsensitiveDict(request.args.to_dict())

    # change CRS:84 to EPSG:4326 if one of SLD layers has native CRS EPSG:5514
    # otherwise layers are shifted by hundreds of meters, we are not sure about the reason
    if query_params.get('service') == 'WMS' and query_params.get('request') == 'GetMap'\
       and (query_params.get('crs') or query_params.get('srs')) == crs_def.CRS_84:
        layers = [
            layer.split(':') for layer in query_params.get('layers').split(',')
        ]
        url_workspace = extract_workspace_from_url(subpath)
        layers = [
            layer if len(layer) == 2 else [url_workspace] + layer
            for layer in layers
        ]
        fix_params = False
        for geoserver_workspace, layer in layers:
            workspace = gs_wms.get_layman_workspace(geoserver_workspace)
            publ_info = layman_util.get_publication_info(
                workspace, LAYER_TYPE, layer,
                {'keys': ['native_crs', 'style_type']})
            if publ_info and publ_info.get(
                    'native_crs') == crs_def.EPSG_5514 and publ_info.get(
                        'style_type') == 'sld':
                fix_params = True
                break

        if fix_params:
            if query_params.get('crs') == crs_def.CRS_84:
                param_key = 'crs'
                bbox = query_params['bbox'].split(',')
                bbox = [bbox[1], bbox[0], bbox[3], bbox[2]]
                query_params['bbox'] = ",".join(bbox)
            else:
                param_key = 'srs'

            query_params[param_key] = crs_def.EPSG_4326
            query_params_string = parse.urlencode(query_params)

    url += '?' + query_params_string

    app.logger.info(f"{request.method} GeoServer proxy, final_url={url}")

    response = requests.request(method=request.method,
                                url=url,
                                data=data,
                                headers=headers_req,
                                cookies=request.cookies,
                                allow_redirects=False)

    if response.status_code == 200:
        for workspace, layername in wfs_t_layers:
            file_info = layman_util.get_publication_info(
                workspace, LAYER_TYPE, layername, context={'keys':
                                                           ['file']})['file']
            if authz.can_i_edit(
                    LAYER_TYPE, workspace, layername
            ) and file_info['file_type'] == settings.FILE_TYPE_VECTOR:
                patch_after_feature_change(workspace, layername)

    excluded_headers = [
        'content-encoding', 'content-length', 'transfer-encoding', 'connection'
    ]
    headers = {
        key: value
        for (key, value) in response.headers.items()
        if key.lower() not in excluded_headers
    }

    final_response = Response(response.content, response.status_code, headers)
    return final_response
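
The CRS:84 branch swaps the bbox axis order (lon/lat to lat/lon) before relabelling the GetMap request as EPSG:4326. A minimal sketch of that parameter fix; the literal CRS strings are assumptions standing in for the crs_def constants used above.

def fix_crs84_getmap_params(query_params):
    """Rewrite a WMS GetMap query from CRS:84 to EPSG:4326.

    Sketch of the branch above: CRS:84 uses lon/lat axis order while
    EPSG:4326 in WMS 1.3.0 uses lat/lon, so the bbox axes are swapped
    when the 'crs' parameter is used; for 'srs' only the code changes.
    """
    params = dict(query_params)
    if params.get('crs') == 'CRS:84':
        minx, miny, maxx, maxy = params['bbox'].split(',')
        params['bbox'] = ','.join([miny, minx, maxy, maxx])
        params['crs'] = 'EPSG:4326'
    elif params.get('srs') == 'CRS:84':
        params['srs'] = 'EPSG:4326'
    return params

# fix_crs84_getmap_params({'crs': 'CRS:84', 'bbox': '14.0,49.0,15.0,50.0'})
# -> {'crs': 'EPSG:4326', 'bbox': '49.0,14.0,50.0,15.0'}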