Example #1
    def proxy(self) -> Response:

        if self.user is None and "authentication_required" in self.request.params:
            LOG.debug("proxy() detected authentication_required")
            raise HTTPUnauthorized(headers={
                "WWW-Authenticate":
                'Basic realm="Access to restricted layers"'
            })

        # We have a user logged in. We need to set group_id and possibly layer_name in the params. We set
        # layer_name when either QUERY_PARAMS or LAYERS is set in the WMS params, i.e. for GetMap and
        # GetFeatureInfo requests. For GetLegendGraphic requests we do not send layer_name, but MapServer
        # should not use the DATA string for GetLegendGraphic.

        if self.ogc_server.auth == main.OGCSERVER_AUTH_STANDARD:
            self.params["role_ids"] = ",".join(
                [str(e) for e in get_roles_id(self.request)])

            # In some applications we want to display the features owned by a user, so we need their id.
            self.params[
                "user_id"] = self.user.id if self.user is not None else "-1"

        # Do not allow direct variable substitution
        for k in list(self.params.keys()):
            if k[:2].capitalize() == "S_":
                LOG.warning("Direct substitution not allowed (%s=%s).", k,
                            self.params[k])
                del self.params[k]

        # add functionalities params
        self.params.update(get_mapserver_substitution_params(self.request))

        # get method
        method = self.request.method

        # we want the browser to cache GetLegendGraphic and
        # DescribeFeatureType requests
        use_cache = False

        errors: Set[str] = set()
        if method == "GET":
            # For GET requests, params are added only if the "request"
            # parameter is actually provided.
            if "request" not in self.lower_params:
                self.params = {}
            else:
                if self.ogc_server.type != main.OGCSERVER_TYPE_QGISSERVER or "user_id" not in self.params:

                    use_cache = self.lower_params["request"] in (
                        "getlegendgraphic", )

                    # cached queries must not carry user_id or role_ids
                    if use_cache and "user_id" in self.params:
                        del self.params["user_id"]
                    if use_cache and "role_ids" in self.params:
                        del self.params["role_ids"]

            if "service" in self.lower_params and self.lower_params[
                    "service"] == "wfs":
                _url = self._get_wfs_url(errors)
            else:
                _url = self._get_wms_url(errors)
        else:
            # POST means WFS
            _url = self._get_wfs_url(errors)

        if _url is None:
            LOG.error("Error getting the URL:\n%s", "\n".join(errors))
            raise HTTPInternalServerError()

        cache_control = Cache.PRIVATE
        if method == "GET" and "service" in self.lower_params and self.lower_params[
                "service"] == "wms":
            if self.lower_params.get("request") in ("getmap",
                                                    "getfeatureinfo"):
                cache_control = Cache.NO
            elif self.lower_params.get("request") == "getlegendgraphic":
                cache_control = Cache.PUBLIC
        elif method == "GET" and "service" in self.lower_params and self.lower_params[
                "service"] == "wfs":
            if self.lower_params.get("request") == "getfeature":
                cache_control = Cache.NO
        elif method != "GET":
            cache_control = Cache.NO

        headers = self._get_headers()
        # Add headers for Geoserver
        if self.ogc_server.auth == main.OGCSERVER_AUTH_GEOSERVER:
            headers["sec-username"] = self.user.username
            headers["sec-roles"] = ";".join(get_roles_name(self.request))

        response = self._proxy_callback(
            cache_control,
            url=_url,
            params=self.params,
            cache=use_cache,
            headers=headers,
            body=self.request.body,
        )

        if (self.lower_params.get("request") == "getmap"
                and not response.content_type.startswith("image/")
                and response.status_code < 400):
            response.status_code = 400

        return response
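The Cache constants used in proxy() above come from the surrounding project and are not shown here; a minimal stand-in, assuming they are simply names for Cache-Control policies, could look like this sketch:

import enum

class Cache(enum.Enum):
    # Hypothetical stand-in for the project's cache policy constants used above.
    PRIVATE = "private"
    PUBLIC = "public"
    NO = "no-cache, no-store"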
Example #2
    def get_report(self):
        id = self.request.matchdict["id"]
        self.layername = self.request.matchdict["layername"]
        self.layer_config = self.config.get("layers", {}).get(self.layername, {})

        if self._get_config("check_credentials", True):
            # check user credentials
            role_id = None if self.request.user is None else \
                self.request.user.role.id

            # FIXME: support of mapserver groups
            if self.layername in get_private_layers() and \
                    self.layername not in get_protected_layers(role_id):
                raise HTTPForbidden

        srs = self._get_config("srs")
        if srs is None:
            raise HTTPInternalServerError(
                "Missing 'srs' in service configuration"
            )

        mapserv_url = self.request.route_url("mapserverproxy")
        vector_request_url = "%s?%s" % (
            mapserv_url,
            "&".join(["%s=%s" % i for i in {
                "service": "WFS",
                "version": "1.1.0",
                "outputformat": "gml3",
                "request": "GetFeature",
                "typeName": self.layername,
                "featureid": self.layername + "." + id,
                "srsName": "epsg:" + str(srs)
            }.items()])
        )

        spec = self._get_config("spec")
        if spec is None:
            spec = {
                "layout": self.layername,
                "outputFormat": "pdf",
                "attributes": {
                    "paramID": id
                }
            }
            map_config = self.layer_config.get("map")
            if map_config is not None:
                spec["attributes"]["map"] = self._build_map(
                    mapserv_url, vector_request_url, srs, map_config
                )

            maps_config = self.layer_config.get("maps")
            if maps_config is not None:
                spec["attributes"]["maps"] = []
                for map_config in maps_config:
                    spec["attributes"]["maps"].append(self._build_map(
                        mapserv_url, vector_request_url, srs, map_config
                    ))
        else:
            spec = loads(dumps(spec) % {
                "layername": self.layername,
                "id": id,
                "srs": srs,
                "mapserv_url": mapserv_url,
                "vector_request_url": vector_request_url,
            })

        return self._do_print(spec)
Example #3
def make_internal_server_error(message):
    error = HTTPInternalServerError()
    error.content_type = "text/plain"
    error.text = message
    return error
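A hedged usage sketch for the helper above, raising the plain-text 500 from a Pyramid view; the route name and the build_report helper are assumptions, not part of the original:

from pyramid.view import view_config

def build_report(request):
    # Hypothetical helper standing in for whatever work the view does.
    raise OSError("disk full")

@view_config(route_name="report")  # hypothetical route name
def report_view(request):
    try:
        return build_report(request)
    except OSError as exc:
        # HTTPInternalServerError is both a Response and an Exception, so it can be raised.
        raise make_internal_server_error("Report generation failed: %s" % exc)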
Example #4
def feedback(self, request):
    defaultRecipient = '*****@*****.**'
    defaultSubject = 'Customer feedback'

    def getParam(param, defaultValue):
        val = request.params.get(param, defaultValue)
        val = val if val != '' else defaultValue
        return val

    def mail(to, subject, text, attachement):
        from email.MIMEMultipart import MIMEMultipart
        from email.MIMEBase import MIMEBase
        from email.MIMEText import MIMEText
        from email import Encoders
        import unicodedata
        import smtplib

        msg = MIMEMultipart()

        msg['To'] = to
        msg['Subject'] = subject

        msg.attach(
            MIMEText(
                unicodedata.normalize('NFKD',
                                      unicode(text)).encode('ascii',
                                                            'ignore')))
        # Handle attachments
        if isinstance(attachement, cgi.FieldStorage):
            types = attachement.type.split('/')
            if len(types) != 2:
                raise HTTPInternalServerError(
                    'File type could not be determined')
            part = MIMEBase(types[0], types[1])
            filePart = attachement.file.read()
            part.set_payload(filePart)
            Encoders.encode_base64(part)
            part.add_header('Content-Disposition',
                            'attachment; filename="%s"' % attachement.filename)
            msg.attach(part)

        mailServer = smtplib.SMTP('127.0.0.1', 25)
        mailServer.ehlo()
        mailServer.starttls()
        mailServer.ehlo()
        # Recipients and sender are always the same
        mailServer.sendmail(to, to, msg.as_string())
        mailServer.close()

    ua = getParam('ua', 'no user-agent found')
    permalink = getParam('permalink', 'No permalink provided')
    feedback = getParam('feedback', 'No feedback provided')
    email = getParam('email', 'Anonymous')
    text = '%s just sent a feedback:\n %s. \nPermalink: %s. \n\nUser-Agent: %s'
    attachement = getParam('attachement', None)

    try:
        mail(defaultRecipient, defaultSubject,
             text % (email, feedback, permalink, ua), attachement)
    except SMTPException:
        raise HTTPInternalServerError()

    return {'success': True}
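The email imports above use the Python 2 module layout (email.MIMEMultipart, email.Encoders). On Python 3 the same message would be assembled from the email.mime package; a minimal sketch with placeholder values:

from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email import encoders

msg = MIMEMultipart()
msg['To'] = 'ops@example.com'          # placeholder address
msg['Subject'] = 'Customer feedback'
msg.attach(MIMEText('feedback body', 'plain', 'utf-8'))  # no ASCII folding needed on Python 3
part = MIMEBase('application', 'pdf')
part.set_payload(b'%PDF-...')          # placeholder attachment bytes
encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="feedback.pdf"')
msg.attach(part)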
Example #5
    def fulltextsearch(self):
        lang = locale_negotiator(self.request)

        try:
            language = self.languages[lang]
        except KeyError:
            return HTTPInternalServerError(
                detail="{0!s} not defined in languages".format(lang))

        if "query" not in self.request.params:
            return HTTPBadRequest(detail="no query")
        terms = self.request.params.get("query")

        maxlimit = self.settings.get("maxlimit", 200)

        try:
            limit = int(
                self.request.params.get("limit",
                                        self.settings.get("defaultlimit", 30)))
        except ValueError:
            return HTTPBadRequest(detail="limit value is incorrect")
        if limit > maxlimit:
            limit = maxlimit

        try:
            partitionlimit = int(self.request.params.get("partitionlimit", 0))
        except ValueError:
            return HTTPBadRequest(detail="partitionlimit value is incorrect")
        if partitionlimit > maxlimit:
            partitionlimit = maxlimit

        terms_ts = "&".join(
            w + ":*" for w in IGNORED_CHARS_RE.sub(" ", terms).split(" ")
            if w != "")
        _filter = FullTextSearch.ts.op("@@")(func.to_tsquery(
            language, terms_ts))

        if self.request.user is None:
            _filter = and_(_filter, FullTextSearch.public.is_(True))
        else:
            _filter = and_(
                _filter,
                or_(
                    FullTextSearch.public.is_(True),
                    FullTextSearch.role_id.is_(None),
                    FullTextSearch.role_id.in_(
                        [r.id for r in self.request.user.roles])))

        if "interface" in self.request.params:
            _filter = and_(
                _filter,
                or_(
                    FullTextSearch.interface_id.is_(None),
                    FullTextSearch.interface_id == self._get_interface_id(
                        self.request.params["interface"])))
        else:
            _filter = and_(_filter, FullTextSearch.interface_id.is_(None))

        _filter = and_(
            _filter,
            or_(
                FullTextSearch.lang.is_(None),
                FullTextSearch.lang == lang,
            ))

        rank_system = self.request.params.get("ranksystem")
        if rank_system == "ts_rank_cd":
            # The numbers used in ts_rank_cd() below indicate a normalization method.
            # Several normalization methods can be combined using |.
            # 2 divides the rank by the document length
            # 8 divides the rank by the number of unique words in document
            # By combining them, shorter results seem to be preferred over longer ones
            # with the same ratio of matching words. But this relies only on testing it
            # and on some assumptions about how it might be calculated
            # (the normalization is applied two times with the combination of 2 and 8,
            # so the effect on at least the one-word-results is therefore stronger).
            rank = func.ts_rank_cd(FullTextSearch.ts,
                                   func.to_tsquery(language, terms_ts), 2 | 8)
        else:
            # Use similarity ranking system from module pg_trgm.
            rank = func.similarity(FullTextSearch.label, terms)

        if partitionlimit:
            # Here we want to partition the search results based on
            # layer_name and limit each partition.
            row_number = func.row_number().over(
                partition_by=FullTextSearch.layer_name,
                order_by=(desc(rank),
                          FullTextSearch.label)).label("row_number")
            subq = DBSession.query(FullTextSearch) \
                .add_columns(row_number).filter(_filter).subquery()
            query = DBSession.query(subq.c.id, subq.c.label, subq.c.params,
                                    subq.c.layer_name, subq.c.the_geom,
                                    subq.c.actions)
            query = query.filter(subq.c.row_number <= partitionlimit)
        else:
            query = DBSession.query(FullTextSearch).filter(_filter)
            query = query.order_by(desc(rank))
            query = query.order_by(FullTextSearch.label)

        query = query.limit(limit)
        objs = query.all()

        features = []
        for o in objs:
            properties = {
                "label": o.label,
            }
            if o.layer_name is not None:
                properties["layer_name"] = o.layer_name
            if o.params is not None:
                properties["params"] = o.params
            if o.actions is not None:
                properties["actions"] = o.actions
            if o.actions is None and o.layer_name is not None:
                properties["actions"] = [{
                    "action": "add_layer",
                    "data": o.layer_name,
                }]

            if o.the_geom is not None:
                geom = to_shape(o.the_geom)
                feature = Feature(id=o.id,
                                  geometry=geom,
                                  properties=properties,
                                  bbox=geom.bounds)
                features.append(feature)
            else:
                feature = Feature(id=o.id, properties=properties)
                features.append(feature)

        return FeatureCollection(features)
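The terms_ts construction above turns free text into a prefix tsquery. IGNORED_CHARS_RE is project-specific and not shown, so the pattern below is only a stand-in for illustration:

import re

# Stand-in for the project's IGNORED_CHARS_RE (assumption: it strips tsquery operators).
IGNORED_CHARS_RE = re.compile(r"[()&|!:<>]")

terms = "restricted: layers"
terms_ts = "&".join(
    w + ":*" for w in IGNORED_CHARS_RE.sub(" ", terms).split(" ") if w != "")
print(terms_ts)  # -> restricted:*&layers:*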
Example #6
    def create(self):

        if "url" not in self.request.params:
            raise HTTPBadRequest("The parameter url is required")

        url = self.request.params["url"]

        # Check that it is an internal URL...
        uri_parts = urlparse(url)
        hostname = uri_parts.hostname
        if "allowed_hosts" in self.settings:
            if hostname not in self.settings[
                    "allowed_hosts"]:  # pragma: no cover
                raise HTTPBadRequest("The requested host is not allowed.")
        else:
            if hostname != self.request.server_name:
                raise HTTPBadRequest("The requested host '%s' should be '%s'" %
                                     (hostname, self.request.server_name))

        shortened = False

        for base in self.short_bases:
            base_parts = urlparse(base)
            if uri_parts.path.startswith(base_parts.path):
                shortened = True
                ref = uri_parts.path.split("/")[-1]

        tries = 0
        while not shortened:
            ref = "".join(
                random.choice(string.ascii_letters + string.digits)
                for i in range(self.settings.get("length", 4)))
            test_url = DBSession.query(Shorturl).filter(
                Shorturl.ref == ref).all()
            if len(test_url) == 0:
                break
            tries += 1  # pragma: no cover
            if tries > 20:  # pragma: no cover
                message = "No free ref found, considere to increase the length"
                logging.error(message)
                raise HTTPInternalServerError(message)

        user_email = self.request.user.email \
            if self.request.user is not None else None
        email = self.request.params.get("email")
        if not shortened:
            short_url = Shorturl()
            short_url.url = url
            short_url.ref = ref
            short_url.creator_email = user_email
            short_url.creation = datetime.now()
            short_url.nb_hits = 0

            DBSession.add(short_url)

        if "base_url" in self.settings:
            s_url = self.settings["base_url"] + ref
        else:
            s_url = self.request.route_url("shortener_get", ref=ref)

        if \
                email is not None and \
                "email_from" in self.settings and \
                "email_subject" in self.settings and \
                "email_body" in self.settings and \
                "smtp_server" in self.settings:  # pragma: no cover
            text = self.settings["email_body"] % {
                "full_url": url,
                "short_url": s_url,
                "message": self.request.params.get("message", ""),
            }
            send_email(
                self.settings["email_from"],
                [email],
                text.encode("utf-8"),
                self.settings["email_subject"],
                self.settings["smtp_server"],
            )

        set_common_headers(self.request, "shortner", NO_CACHE)
        return {"short_url": s_url}
Example #7
            'sblist': sblist,
            'ldata': sblist.fetch(to_add, to_sub)
        }

        # Not publishing deltas for this list?  Delete all previous chunks to
        # make way for the new corpus
        # if _setting(request, list_info.name, 'not_publishing_deltas'):
        if sblist.settings.get('not_publishing_deltas'):
            # Raise hell if we have suspicious data with this flag set
            if (len(to_add) != 1 or len(to_sub) != 0):
                logger.error("Configuration error!  Mismatch between "
                             "{0}'s configuration has "
                             "'not_publishing_deltas' enabled but its data"
                             "file has more than one chunk to serve.".format(
                                 list_info.name))
                raise HTTPInternalServerError()
            resp_payload['lists'][list_info.name]['adddels'] = list_info.adds

    return HTTPOk(content_type="application/octet-stream",
                  body=format_downloads(request, resp_payload))


def format_downloads(request, resp_payload):
    """
    Formats the response body according to protocol version
    """
    body = "n:{0}\n".format(resp_payload['interval'])

    for lname, ldict in resp_payload['lists'].iteritems():
        ldata = ldict['ldata']
        sblist = ldict['sblist']
Example #8
def test_internal_server_error(dummy_request):
    result = internal_server_error(HTTPInternalServerError(), dummy_request)
    assert_json_content(result, 'Something went wrong on our end')
    assert isinstance(result, HTTPInternalServerError)
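The internal_server_error exception view exercised by this test is not shown here; a hedged sketch of what it might look like, keeping only the message the assertion checks (the signature, JSON key and helper behaviour are assumptions):

import json

from pyramid.httpexceptions import HTTPInternalServerError

def internal_server_error(exc, request):
    # Hypothetical exception view: keep the 500 response but give it a JSON body.
    exc.content_type = "application/json"
    exc.body = json.dumps({"message": "Something went wrong on our end"}).encode("utf-8")
    return exc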
Example #9
def fulltextsearch(request):

    try:
        lang = request.registry.settings['default_locale_name']
    except KeyError:
        return HTTPInternalServerError(
            detail='default_locale_name not defined in settings')
    try:
        lang = _language_dict[lang]
    except KeyError:
        return HTTPInternalServerError(
            detail='%s not defined in _language_dict' % lang)

    if 'query' not in request.params:
        return HTTPBadRequest(detail='no query')
    query = request.params.get('query')

    maxlimit = request.registry.settings.get('fulltextsearch_maxlimit', 200)

    try:
        limit = int(request.params.get(
            'limit',
            request.registry.settings.get('fulltextsearch_defaultlimit', 30)))
    except ValueError:
        return HTTPBadRequest(detail='limit value is incorrect')
    if limit > maxlimit:
        limit = maxlimit

    try:
        partitionlimit = int(request.params.get('partitionlimit', 0))
    except ValueError:
        return HTTPBadRequest(detail='partitionlimit value is incorrect')
    if partitionlimit > maxlimit:
        partitionlimit = maxlimit

    terms = '&'.join(w + ':*' for w in query.split(' ') if w != '')
    _filter = "%(tsvector)s @@ to_tsquery('%(lang)s', '%(terms)s')" % \
        {'tsvector': 'ts', 'lang': lang, 'terms': terms}

    # flake8 does not like `== True`
    if request.user is None:
        _filter = and_(_filter, FullTextSearch.public == True)  # NOQA
    else:
        _filter = and_(
                _filter,
                or_(FullTextSearch.public == True,
                    FullTextSearch.role_id == None,
                    FullTextSearch.role_id == request.user.role.id))  # NOQA

    # The numbers used in ts_rank_cd() below indicate a normalization method.
    # Several normalization methods can be combined using |.
    # 2 divides the rank by the document length
    # 8 divides the rank by the number of unique words in document
    # By combining them, shorter results seem to be preferred over longer ones
    # with the same ratio of matching words. But this relies only on testing it
    # and on some assumptions about how it might be calculated
    # (the normalization is applied two times with the combination of 2 and 8,
    # so the effect on at least the one-word-results is therefore stronger).
    rank = "ts_rank_cd(%(tsvector)s, " \
        "to_tsquery('%(lang)s', '%(terms)s'), 2|8)" % {
            'tsvector': 'ts',
            'lang': lang,
            'terms': terms
        }

    if partitionlimit:
        # Here we want to partition the search results based on
        # layer_name and limit each partition.
        row_number = func.row_number() \
            .over(
                partition_by=FullTextSearch.layer_name,
                order_by=(desc(rank), FullTextSearch.label)) \
            .label('row_number')
        subq = DBSession.query(FullTextSearch) \
            .add_columns(row_number).filter(_filter).subquery()
        query = DBSession.query(subq.c.id, subq.c.label,
                                subq.c.layer_name, subq.c.the_geom)
        query = query.filter(subq.c.row_number <= partitionlimit)
    else:
        query = DBSession.query(FullTextSearch).filter(_filter)
        query = query.order_by(desc(rank))
        query = query.order_by(FullTextSearch.label)

    query = query.limit(limit)
    objs = query.all()

    features = []
    for o in objs:
        if o.the_geom is not None:
            properties = {"label": o.label, "layer_name": o.layer_name}
            geom = wkb_loads(str(o.the_geom.geom_wkb))
            feature = Feature(id=o.id, geometry=geom,
                              properties=properties, bbox=geom.bounds)
            features.append(feature)

    # TODO: add callback function if provided in request, else return geojson
    return FeatureCollection(features)
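The _filter and rank strings above splice the user-supplied terms directly into SQL text; Example #5 builds the same predicate with bound parameters. A drop-in sketch, assuming the FullTextSearch model from this example:

from sqlalchemy import func

def build_fts_clauses(lang, terms):
    # Parameterized equivalents of the raw _filter / rank strings above (see Example #5).
    fts_filter = FullTextSearch.ts.op("@@")(func.to_tsquery(lang, terms))
    rank = func.ts_rank_cd(FullTextSearch.ts, func.to_tsquery(lang, terms), 2 | 8)
    return fts_filter, rank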
Example #10
 def mocked_request(*_, **__):  # noqa: E811
     tmp["retry"] -= 1
     if not tmp["retry"]:
         return mocked_file_response(tmp["json"].name, tmp["http"])
     resp = HTTPInternalServerError()  # internal retry expect at least a 5xx code to retry
     return resp  # will be available on next call (to test retries)
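The mock above only makes sense against a caller that retries while it keeps getting 5xx responses; the loop below is a minimal sketch of such a caller (the function name and retry count are assumptions):

def request_with_retries(send, retries=3):
    # Call `send` until it stops returning a 5xx response or retries run out.
    resp = None
    for _ in range(retries):
        resp = send()
        if getattr(resp, "status_code", 500) < 500:
            break
    return resp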
Example #11
def georeferenceConfirm(request):
    log.info('Receive request for processing georeference validation result')

    try:
        userid = checkIsUser(request)

        request_data = None
        if request.method == 'POST':
            request_data = request.json_body

        mapObj = parseMapObjForId(request_data, 'id', request.db)
        log.debug('Id is valid: %s' % request_data)

        log.debug(
            'Check if there is already a registered georeference process for this messtischblatt ...'
        )
        if Georeferenzierungsprozess.isGeoreferenced(mapObj.id, request.db):
            msg = 'There is already a georeference process for this map. Please reload and start from the latest changes.'
            log.debug(msg)

            georeferenceid = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(
                mapObj.id, request.db).id
            response = {'text': msg, 'georeferenceid': georeferenceid}
            return response

        # currently this is only supported if the target srs is EPSG:4314
        log.debug('Start saving georeference process in the database ...')
        epsg_code = int(
            str(request_data['georeference']['target']).split(':')[1])
        if request_data['georeference'][
                'source'] == 'pixel' and epsg_code == 4314:
            log.debug('Create georeference process record ...')
            timestamp = getTimestampAsPGStr()
            georeference_parameter = str(
                convertUnicodeDictToUtf(request_data['georeference']))
            georefProcess = Georeferenzierungsprozess(
                messtischblattid=mapObj.apsobjectid,
                nutzerid=userid,
                georefparams=ast.literal_eval(georeference_parameter),
                clipparameter=georeference_parameter,
                timestamp=timestamp,
                isactive=True,
                type='new',
                adminvalidation='',
                processed=False,
                mapid=mapObj.id,
                overwrites=0)
            request.db.add(georefProcess)
            request.db.flush()

            log.debug('Create response ...')
            response = {
                'text':
                'Georeference result saved. It will soon be ready for use.',
                'georeferenceid': georefProcess.id,
                'points': 20,
                'gcps': request_data['georeference'],
                'type': 'confirm'
            }
            return response
        else:
            raise GeoreferenceParameterError(
                'Wrong or missing service parameter')

    except GeoreferenceParameterError as e:
        log.error(e)
        log.error(traceback.format_exc())
        raise HTTPBadRequest(ERROR_MSG)
    except Exception as e:
        log.error(e)
        log.error(traceback.format_exc())
        raise HTTPInternalServerError(ERROR_MSG)
Example #12
def get_default_context(request, **kwargs):
    kwargs.update(default_context)
    from ..auth.util import get_user, get_current_discussion
    if request.scheme == "http"\
            and asbool(config.get("require_secure_connection")):
        raise HTTPFound(get_global_base_url(True) + request.path_qs)
    react_url = '/static2'
    use_webpack_server = asbool(config.get('use_webpack_server'))
    if use_webpack_server:
        # Allow to specify a distinct webpack_host in configuration.
        # Useful for development tests of social auth through a reverse tunnel.
        # Otherwise fallback on public_hostname, then localhost.
        webpack_host = config.get('webpack_host',
                                  config.get('public_hostname', 'localhost'))
        react_url = 'http://%s:%d' % (webpack_host,
                                      int(config.get('webpack_port', 8000)))
    socket_proxied = asbool(config.get('changes_websocket_proxied'))
    websocket_port = None if socket_proxied \
        else config.get('changes_websocket_port')
    secure_socket = socket_proxied and (
        asbool(config.get("require_secure_connection")) or
        (asbool(config.get("accept_secure_connection"))
         and request.url.startswith('https:')))
    application_url = get_global_base_url()
    socket_url = get_global_base_url(
        secure_socket, websocket_port) + config.get('changes_prefix')

    localizer = request.localizer
    _ = TranslationStringFactory('assembl')
    user = get_user(request)
    if user and user.username:
        user_profile_edit_url = request.route_url(
            'profile_user', type='u', identifier=user.username.username)
    elif user:
        user_profile_edit_url = request.route_url('profile_user',
                                                  type='id',
                                                  identifier=user.id)
    else:
        user_profile_edit_url = None

    web_analytics_piwik_script = config.get(
        'web_analytics_piwik_script') or False
    discussion = get_current_discussion()
    if (web_analytics_piwik_script and discussion
            and discussion.web_analytics_piwik_id_site):
        web_analytics_piwik_script = web_analytics_piwik_script % (
            discussion.web_analytics_piwik_id_site,
            discussion.web_analytics_piwik_id_site)
    else:
        web_analytics_piwik_script = False

    web_analytics_piwik_custom_variable_size = config.get(
        'web_analytics_piwik_custom_variable_size')
    if not web_analytics_piwik_custom_variable_size:
        web_analytics_piwik_custom_variable_size = 5

    help_url = config.get('help_url') or ''
    if discussion and discussion.help_url:
        help_url = discussion.help_url
    if help_url and "%s" in help_url:
        help_url = help_url % localizer.locale_name

    first_login_after_auto_subscribe_to_notifications = False
    if (user and discussion and discussion.id and user.is_first_visit
            and discussion.subscribe_to_notifications_on_signup
            and user.is_participant(discussion.id)):
        first_login_after_auto_subscribe_to_notifications = True
    locales = config.get('available_languages').split()
    countries_for_locales = defaultdict(set)
    for locale in locales:
        countries_for_locales[get_language(locale)].add(get_country(locale))
    show_locale_country = {
        locale: (len(countries_for_locales[get_language(locale)]) > 1)
        for locale in locales
    }
    jedfilename = os.path.join(os.path.dirname(__file__), '..', 'locale',
                               localizer.locale_name, 'LC_MESSAGES',
                               'assembl.jed.json')
    if not os.path.exists(jedfilename) and '_' in localizer.locale_name:
        jedfilename = os.path.join(os.path.dirname(__file__), '..', 'locale',
                                   get_language(localizer.locale_name),
                                   'LC_MESSAGES', 'assembl.jed.json')
    assert os.path.exists(jedfilename)

    from ..models.facebook_integration import language_sdk_existance
    fb_lang_exists, fb_locale = language_sdk_existance(
        get_language(localizer.locale_name), countries_for_locales)

    def process_export_list(ls):
        return map(lambda s: s.strip(), ls.split(","))

    social_settings = {
        'fb_export_permissions':
        config.get('facebook.export_permissions'),
        'fb_debug':
        asbool(config.get('facebook.debug_mode')),
        'fb_app_id':
        config.get('facebook.consumer_key'),
        'fb_api_version':
        config.get('facebook.api_version') or '2.2',
        'supported_exports':
        process_export_list(config.get('supported_exports_list'))
    }

    # A container for all analytics related settings. All future
    # analytics based settings that will be exposed to the templates
    # should be included in this dictionary
    analytics_settings = {
        'enabled': True if web_analytics_piwik_script else False,
    }

    if analytics_settings.get('enabled', False):
        analytics_settings['piwik'] = {
            'script': web_analytics_piwik_script,
            'host': config.get('piwik_host')
        }

    analytics_url = config.get('web_analytics_piwik_url', None)

    get_route = create_get_route(request, discussion)
    providers = get_provider_data(get_route)

    errors = request.session.pop_flash()
    if kwargs.get('error', None):
        errors.append(kwargs['error'])
    if errors:
        kwargs['error'] = '<br />'.join(errors)
    messages = request.session.pop_flash('message')
    if messages:
        kwargs['messages'] = '<br />'.join(messages)

    admin_email = config.get('assembl.admin_email', None)
    # If an admin_email is improperly configured, raise an error
    if admin_email is None or admin_email == '':
        raise HTTPInternalServerError(
            explanation=
            "Assembl MUST have an admin_email configured in order to operate.")

    theme_name, theme_relative_path = get_theme_info(discussion)
    node_env = os.getenv('NODE_ENV', 'production')
    under_test = bool(config.get('under_test') or False)
    base = dict(
        kwargs,
        request=request,
        application_url=application_url,
        get_route=get_route,
        user=user,
        templates=get_template_views(),
        discussion=discussion
        or {},  # Templates won't load without a discussion object
        preferences=discussion.preferences if discussion else {},
        user_profile_edit_url=user_profile_edit_url,
        locale=localizer.locale_name,
        locales=locales,
        fb_lang_exists=fb_lang_exists,
        fb_locale=fb_locale,
        social_settings=social_settings,
        show_locale_country=show_locale_country,
        NODE_ENV=node_env,
        theme_name=theme_name,
        theme_relative_path=theme_relative_path,
        minified_js=config.get('minified_js') or False,
        web_analytics=analytics_settings,
        analytics_url=analytics_url,
        help_url=help_url,
        socket_url=socket_url,
        REACT_URL=react_url,
        elasticsearch_lang_indexes=config.get('elasticsearch_lang_indexes',
                                              'en fr'),
        first_login_after_auto_subscribe_to_notifications=
        first_login_after_auto_subscribe_to_notifications,
        sentry_dsn=config.get('sentry_dsn', ''),
        activate_tour=str(config.get('activate_tour') or False).lower(),
        providers=providers,
        providers_json=json.dumps(providers),
        translations=io.open(jedfilename, encoding='utf-8').read(),
        admin_email=admin_email,
        under_test=under_test)

    base.update({
        "opengraph_locale": get_opengraph_locale(request),
        "get_description": get_description(request),
        "get_landing_page_image": get_landing_page_image(),
        "private_social_sharing": private_social_sharing(),
        "get_topic": get_topic(request),
        "get_discussion_url": get_discussion_url(),
        "discussion_title": discussion_title(),
    })
    base.update(get_v1_resources_hash())
    return base
Example #13
 def api_wrapper(request):
     try:
         return func(request)
     except Exception:
         LOG.exception("Exception during API call:")
         return HTTPInternalServerError("Something bad happened :(")
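The fragment above is the inner function of a decorator (func is closed over); a hedged reconstruction of the full wrapper, with the outer decorator name assumed:

import functools
import logging

from pyramid.httpexceptions import HTTPInternalServerError

LOG = logging.getLogger(__name__)

def api_view(func):
    # Assumed outer decorator; only the inner api_wrapper appears in the original.
    @functools.wraps(func)
    def api_wrapper(request):
        try:
            return func(request)
        except Exception:
            LOG.exception("Exception during API call:")
            return HTTPInternalServerError("Something bad happened :(")
    return api_wrapper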
Example #14
 def failed_validation(self):
     """Catches colander.Invalid exceptions
     and returns a ExtJS form submission response
     """
     body = {'success': False, 'errors': self.request.exception.asdict()}
     return HTTPInternalServerError(body=json.dumps(body))
Example #15
 def error(request):
     raise HTTPInternalServerError("oh no")
Example #16
def call_discourse(method, *args, **kwargs):
    try:
        return method.__call__(*args, **kwargs)
    except Exception as e:
        log.error('Error with Discourse: {}'.format(str(e)), exc_info=True)
        raise HTTPInternalServerError('Error with Discourse')
Example #17
    def __call__(self, value, system):
        """
        Implements a subclass of pyramid_oereb.lib.renderer.extract.json_.Renderer to create a print result
        out of a json. The json extract is reformatted to fit the structure of mapfish print.

        Args:
            value (tuple): A tuple containing the generated extract record and the params
                dictionary.
            system (dict): The available system properties.

        Returns:
            buffer: The pdf content as received from configured mapfish print instance url.
        """
        log.debug("Parameter webservice is {}".format(value[1]))

        if value[1].images:
            raise HTTPBadRequest('With image is not allowed in the print')

        self._request = self.get_request(system)

        # Create a lower case GET dict to be able to accept all cases of upper and lower case writing
        self._lowercase_GET_dict = dict(
            (k.lower(), v.lower()) for k, v in self._request.GET.iteritems())

        # If a language is specified in the request, use it. Otherwise, use the language from base class
        self._fallback_language = Config.get('default_language')
        if 'lang' in self._lowercase_GET_dict:
            self._language = self._lowercase_GET_dict.get('lang')

        # Based on extract record and webservice parameter, render the extract data as JSON
        extract_record = value[0]
        extract_as_dict = self._render(extract_record, value[1])
        feature_geometry = mapping(extract_record.real_estate.limit)
        pdf_to_join = set()

        if Config.get('print', {}).get('compute_toc_pages', False):
            extract_as_dict['nbTocPages'] = TocPages(
                extract_as_dict).getNbPages()
        else:
            extract_as_dict['nbTocPages'] = 1

        self.convert_to_printable_extract(extract_as_dict, feature_geometry,
                                          pdf_to_join)

        print_config = Config.get('print', {})

        extract_as_dict[
            'Display_RealEstate_SubunitOfLandRegister'] = print_config.get(
                'display_real_estate_subunit_of_land_register', True)

        extract_as_dict['Display_Certification'] = print_config.get(
            'display_certification', True)

        spec = {
            'layout': Config.get('print', {})['template_name'],
            'outputFormat': 'pdf',
            'lang': self._language,
            'attributes': extract_as_dict,
        }

        response = self.get_response(system)

        if self._request.GET.get('getspec', 'no') != 'no':
            response.headers[
                'Content-Type'] = 'application/json; charset=UTF-8'
            return json.dumps(spec, sort_keys=True, indent=4)
        pdf_url = urlparse.urljoin(
            Config.get('print', {})['base_url'] + '/', 'buildreport.pdf')
        pdf_headers = Config.get('print', {})['headers']
        print_result = requests.post(pdf_url,
                                     headers=pdf_headers,
                                     data=json.dumps(spec))
        try:
            if Config.get('print', {}).get('compute_toc_pages', False):
                with io.BytesIO() as pdf:
                    pdf.write(print_result.content)
                    pdf_reader = PdfFileReader(pdf)
                    x = []
                    for i in range(len(pdf_reader.getOutlines())):
                        x.append(pdf_reader.getOutlines()[i]['/Page']
                                 ['/StructParents'])
                    try:
                        true_nb_of_toc = min(x) - 1
                    except ValueError:
                        true_nb_of_toc = 1

                    if true_nb_of_toc != extract_as_dict['nbTocPages']:
                        log.warning(
                            'nbTocPages in result pdf ({}) does not match the predicted value ({}); requesting a new pdf'
                            .format(true_nb_of_toc,
                                    extract_as_dict['nbTocPages']))  # noqa
                        extract_as_dict['nbTocPages'] = true_nb_of_toc
                        print_result = requests.post(pdf_url,
                                                     headers=pdf_headers,
                                                     data=json.dumps(spec))
        except PdfReadError as e:
            err_msg = 'a problem occurred while generating the pdf file'
            log.error(err_msg + ': ' + str(e))
            raise HTTPInternalServerError(err_msg)

        if not extract_as_dict['isReduced'] and print_result.status_code == 200:
            main = tempfile.NamedTemporaryFile(suffix='.pdf')
            main.write(print_result.content)
            main.flush()
            cmd = ['pdftk', main.name]
            temp_files = [main]
            for url in pdf_to_join:
                result = requests.get(url)
                content_type = result.headers.get('content-type')
                log.debug("document url: " + url + " => content_type: " +
                          content_type)
                if content_type != 'application/pdf':
                    msg = "Skipped document inclusion (url: '{}') because content_type: '{}'"
                    log.warning(msg.format(url, content_type))
                    continue
                tmp_file = tempfile.NamedTemporaryFile(suffix='.pdf')
                tmp_file.write(result.content)
                tmp_file.flush()
                temp_files.append(tmp_file)
                cmd.append(tmp_file.name)
            out = tempfile.NamedTemporaryFile(suffix='.pdf')
            cmd += ['cat', 'output', out.name]
            sys.stdout.flush()
            time.sleep(0.1)
            subprocess.check_call(cmd)
            content = out.file.read()
        else:
            content = print_result.content

        # Save printed file to the specified path.
        pdf_archive_path = print_config.get('pdf_archive_path', None)
        if pdf_archive_path is not None:
            self.archive_pdf_file(pdf_archive_path, content, extract_as_dict)

        response.status_code = print_result.status_code
        response.headers = print_result.headers
        if 'Transfer-Encoding' in response.headers:
            del response.headers['Transfer-Encoding']
        if 'Connection' in response.headers:
            del response.headers['Connection']
        return content
Example #18
    def _proxy(self, _url, params, use_cache, method, body, headers):
        # Name of the JSON callback (value of the "callback" query string
        # param in the request). None if the request has no "callback" param
        # in the query string.
        callback = params.get('callback')

        # get query string
        params_encoded = {}
        for k, v in params.iteritems():
            if k == 'callback':
                continue
            params_encoded[k] = unicode(v).encode('utf-8')
        query_string = urllib.urlencode(params_encoded)

        _url += '?' + query_string
        log.info("Querying mapserver proxy at URL: %s." % _url)

        # forward self.request to target (without Host Header)
        http = httplib2.Http()
        headers = dict(headers)
        if urlparse(_url).hostname != 'localhost':  # pragma: no cover
            headers.pop('Host')
        # MapServer doesn't need the cookie, and sometimes fails when it is present.
        if 'Cookie' in headers:  # pragma: no cover
            headers.pop('Cookie')
        try:
            resp, content = http.request(
                _url, method=method, body=body, headers=headers
            )
        except:  # pragma: no cover
            log.error(
                "Error '%s' while getting the URL: %s." %
                (sys.exc_info()[0], _url))
            if method == "POST":
                log.error("--- With body ---")
                log.error(body)
            return HTTPBadGateway("See logs for details")  # pragma: no cover

        if resp.status != 200:
            log.error("\nError\n '%s'\n in response from URL:\n %s\n "
                      "with query:\n %s" %
                      (resp.reason, _url, body))  # pragma: no cover
            return HTTPInternalServerError(
                "See logs for details")  # pragma: no cover

        # check that the response declares a content type
        if "content-type" not in resp:
            return HTTPNotAcceptable()  # pragma: no cover

        if method == "POST" and is_get_feature(body) and \
                self.settings.get('enable_limit_featurecollection', True):
            content = limit_featurecollection(
                content,
                limit=self.settings.get('maxfeatures', 200)
            )

        content_type = None
        if callback:
            content_type = "application/javascript"
            # escape single quotes in the JavaScript string
            content = unicode(content.decode('utf8'))
            content = content.replace(u"'", ur"\'")
            content = u"%s('%s');" % (callback, u' '.join(content.splitlines()))
        else:
            content_type = resp["content-type"]

        headers = {"Content-Type": content_type}
        response = Response(content, status=resp.status, headers=headers)

        if use_cache:
            response.cache_control.public = True
            response.cache_control.max_age = self.request.registry.settings["default_max_age"]
        else:
            response.cache_control.no_cache = True

        return response
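httplib2, unicode() and iteritems() mark this proxy as Python 2 code; on Python 3 the forwarding call could be done with requests, roughly as in this sketch (not the project's actual code):

import requests

def forward(method, url, params, body, headers):
    # Send the proxied request and return what the Response construction above needs.
    resp = requests.request(method, url, params=params, data=body, headers=headers)
    return resp.status_code, resp.headers.get("content-type"), resp.content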
Example #19
        except InvalidPermission, message:
            return JSONHTTPBadRequest(
                error=dict(error=InvalidPermission.__name__,
                           error_description=message.value))
        except ValidationError, message:
            return JSONHTTPBadRequest(
                error=dict(error=ValidationError.__name__,
                           error_description=message.value))

        # JSON decode error????
        except ValueError:
            return JSONHTTPBadRequest(error=dict(
                error='JSONDecodeError',
                error_description='Invalid JSON data found on requests body'))
        except:
            return HTTPInternalServerError()
        else:
            try:
                # Don't cache by default, get configuration from resource if any
                route_cache_settings = RESOURCES.get(
                    request.matched_route.name).get(
                        'cache',
                        'must-revalidate, max-age=0, no-cache, no-store')
                response.headers.update(
                    {'Cache-Control': route_cache_settings})
            except:
                pass
            return response

    return replacement
Example #20
 def test_format_content_json_str_invalid_usage(self):
     non_json_serializable_content = {"key": HTTPInternalServerError()}
     utils.check_raises(
         lambda: ax.format_content_json_str(200, "", non_json_serializable_content, CONTENT_TYPE_JSON),
         HTTPInternalServerError, msg="invalid content format expected as JSON serializable should raise"
     )