Example #1
def test_multidict_encoding():
    d = OrderedMultiDict()
    d.add("2013-10-10T23:26:05.657975+0000", "2013-10-10T23:26:05.657975+0000")
    assert (
        urls.url_encode(d)
        == "2013-10-10T23%3A26%3A05.657975%2B0000=2013-10-10T23%3A26%3A05.657975%2B0000"
    )
Example #2
    def get_with_data(*args, **kwargs):
        filters = kwargs.pop('filters', None)
        limit = kwargs.pop('limit', None)
        offset = kwargs.pop('offset', 0)
        order = kwargs.pop('order', Reservation.start_dt)
        limit_per_room = kwargs.pop('limit_per_room', False)
        occurs_on = kwargs.pop('occurs_on')
        if kwargs:
            raise ValueError('Unexpected kwargs: {}'.format(kwargs))

        query = Reservation.query.options(joinedload(Reservation.room))
        if filters:
            query = query.filter(*filters)
        if occurs_on:
            query = query.filter(
                Reservation.id.in_(db.session.query(ReservationOccurrence.reservation_id)
                                   .filter(ReservationOccurrence.date.in_(occurs_on),
                                           ReservationOccurrence.is_valid))
            )
        if limit_per_room and (limit or offset):
            query = limit_groups(query, Reservation, Reservation.room_id, order, limit, offset)

        query = query.order_by(order, Reservation.created_dt)

        if not limit_per_room:
            if limit:
                query = query.limit(limit)
            if offset:
                query = query.offset(offset)

        result = OrderedDict((r.id, {'reservation': r}) for r in query)

        if 'vc_equipment' in args:
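            # correlated scalar subquery: id of the equipment type named 'Video conference' for the reservation's room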
            vc_id_subquery = db.session.query(EquipmentType.id) \
                .correlate(Reservation) \
                .filter_by(name='Video conference') \
                .join(RoomEquipmentAssociation) \
                .filter(RoomEquipmentAssociation.c.room_id == Reservation.room_id) \
                .as_scalar()

            # noinspection PyTypeChecker
            vc_equipment_data = dict(db.session.query(Reservation.id, static_array.array_agg(EquipmentType.name))
                                     .join(ReservationEquipmentAssociation, EquipmentType)
                                     .filter(Reservation.id.in_(result.iterkeys()))
                                     .filter(EquipmentType.parent_id == vc_id_subquery)
                                     .group_by(Reservation.id))

            for id_, data in result.iteritems():
                data['vc_equipment'] = vc_equipment_data.get(id_, ())

        if 'occurrences' in args:
            occurrence_data = OrderedMultiDict(db.session.query(ReservationOccurrence.reservation_id,
                                                                ReservationOccurrence)
                                               .filter(ReservationOccurrence.reservation_id.in_(result.iterkeys()))
                                               .order_by(ReservationOccurrence.start_dt))
            for id_, data in result.iteritems():
                data['occurrences'] = occurrence_data.getlist(id_)

        return result.values()
Example #3
File: xor.py Project: Mattias-/xor
 def __get_query_args(req):
     d = OrderedMultiDict()
     if len(req.query_string) > 0:
         arg_list = req.query_string.split('&')
         arg_list = map(unquote, arg_list)
         for arg in arg_list:
             spl = arg.split('=', 1)
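             # pad with None so parameters without '=' still get a value (spl[1] is then None)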
             spl.append(None)
             d.add(spl[0], spl[1])
     return d
Example #4
File: xor.py Project: Mattias-/xor
 def __get_post_args(req):
     d = OrderedMultiDict()
     if req.method == 'POST':
         content = req.environ['wsgi.input']
         post_data = content.read(req.content_length)
         arg_list = post_data.split('&')
         arg_list = map(unquote, arg_list)
         for arg in arg_list:
             spl = arg.split('=', 1)
             spl.append(None)
             d.add(spl[0], spl[1])
     return d
Example #5
 def test_ordered_multidict_encoding(self):
     d = OrderedMultiDict()
     d.add("foo", 1)
     d.add("foo", 2)
     d.add("foo", 3)
     d.add("bar", 0)
     d.add("foo", 4)
     assert urls.url_encode(d) == "foo=1&foo=2&foo=3&bar=0&foo=4"
Example #6
def test_ordered_multidict_encoding():
    d = OrderedMultiDict()
    d.add('foo', 1)
    d.add('foo', 2)
    d.add('foo', 3)
    d.add('bar', 0)
    d.add('foo', 4)
    assert urls.url_encode(d) == 'foo=1&foo=2&foo=3&bar=0&foo=4'
Example #7
 def test_ordered_multidict_encoding(self):
     d = OrderedMultiDict()
     d.add('foo', 1)
     d.add('foo', 2)
     d.add('foo', 3)
     d.add('bar', 0)
     d.add('foo', 4)
     self.assert_equal(urls.url_encode(d), 'foo=1&foo=2&foo=3&bar=0&foo=4')
Example #8
 def __init__(self, parsed):
     parsed = helpers.ensure_soup(parsed)
     if parsed.name != 'form':
         parsed = parsed.find('form')
     self.parsed = parsed
     self.action = self.parsed.get('action')
     self.method = self.parsed.get('method', 'get')
     self.fields = OrderedMultiDict()
     for field in _parse_fields(self.parsed):
         self.add_field(field)
Example #9
def test_ordered_multidict_encoding():
    """"Make sure URLs are properly encoded from OrderedMultiDicts"""
    d = OrderedMultiDict()
    d.add('foo', 1)
    d.add('foo', 2)
    d.add('foo', 3)
    d.add('bar', 0)
    d.add('foo', 4)
    assert url_encode(d) == 'foo=1&foo=2&foo=3&bar=0&foo=4'
Example #10
    def get_with_data(*args, **kwargs):
        filters = kwargs.pop('filters', None)
        limit = kwargs.pop('limit', None)
        offset = kwargs.pop('offset', 0)
        order = kwargs.pop('order', Reservation.start_dt)
        limit_per_room = kwargs.pop('limit_per_room', False)
        occurs_on = kwargs.pop('occurs_on')
        if kwargs:
            raise ValueError('Unexpected kwargs: {}'.format(kwargs))

        query = Reservation.query.options(joinedload(Reservation.room))
        if filters:
            query = query.filter(*filters)
        if occurs_on:
            query = query.filter(
                Reservation.id.in_(db.session.query(ReservationOccurrence.reservation_id)
                                   .filter(ReservationOccurrence.date.in_(occurs_on),
                                           ReservationOccurrence.is_valid))
            )
        if limit_per_room and (limit or offset):
            query = limit_groups(query, Reservation, Reservation.room_id, order, limit, offset)

        query = query.order_by(order, Reservation.created_dt)

        if not limit_per_room:
            if limit:
                query = query.limit(limit)
            if offset:
                query = query.offset(offset)

        result = OrderedDict((r.id, {'reservation': r}) for r in query)

        if 'occurrences' in args:
            occurrence_data = OrderedMultiDict(db.session.query(ReservationOccurrence.reservation_id,
                                                                ReservationOccurrence)
                                               .filter(ReservationOccurrence.reservation_id.in_(result.iterkeys()))
                                               .order_by(ReservationOccurrence.start_dt))
            for id_, data in result.iteritems():
                data['occurrences'] = occurrence_data.getlist(id_)

        return result.values()
Example #11
def test_multidict_pickle():
    """MultiDict types are pickle-able"""
    for protocol in xrange(pickle.HIGHEST_PROTOCOL + 1):
        print 'pickling protocol', protocol
        d = MultiDict()
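        # setlist assigns the full list of values for a key in one call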
        d.setlist('foo', [1, 2, 3, 4])
        d.setlist('bar', 'foo bar baz'.split())
        s = pickle.dumps(d, protocol)
        ud = pickle.loads(s)
        assert type(ud) is type(d)
        assert ud == d
        assert pickle.loads(s.replace('werkzeug.datastructures', 'werkzeug')) == d
        ud['newkey'] = 'bla'
        assert ud != d

        d2 = OrderedMultiDict(d)
        d2.add('foo', 5)
        s = pickle.dumps(d2, protocol)
        ud = pickle.loads(s)
        assert type(ud) is type(d2)
        assert ud == d2
        ud['newkey'] = 'bla'
        print ud
        print d2
        assert ud != d2

        im = ImmutableMultiDict(d)
        assert im == d
        s = pickle.dumps(im, protocol)
        ud = pickle.loads(s)
        assert ud == im
        assert type(ud) is type(im)

        c = CombinedMultiDict([ud, im])
        cc = pickle.loads(pickle.dumps(c, protocol))
        assert c == cc
        assert type(c) is type(cc)
Example #12
def test_multidict_pickle():
    """MultiDict types are pickle-able"""
    for protocol in xrange(pickle.HIGHEST_PROTOCOL + 1):
        print "pickling protocol", protocol
        d = MultiDict()
        d.setlist("foo", [1, 2, 3, 4])
        d.setlist("bar", "foo bar baz".split())
        s = pickle.dumps(d, protocol)
        ud = pickle.loads(s)
        assert type(ud) is type(d)
        assert ud == d
        assert pickle.loads(s.replace("werkzeug.datastructures", "werkzeug")) == d
        ud["newkey"] = "bla"
        assert ud != d

        d2 = OrderedMultiDict(d)
        d2.add("foo", 5)
        s = pickle.dumps(d2, protocol)
        ud = pickle.loads(s)
        assert type(ud) is type(d2)
        assert ud == d2
        ud["newkey"] = "bla"
        print ud
        print d2
        assert ud != d2

        im = ImmutableMultiDict(d)
        assert im == d
        s = pickle.dumps(im, protocol)
        ud = pickle.loads(s)
        assert ud == im
        assert type(ud) is type(im)

        c = CombinedMultiDict([ud, im])
        cc = pickle.loads(pickle.dumps(c, protocol))
        assert c == cc
        assert type(c) is type(cc)
Example #13
 def _is_canonical_url(self, canonical_params):
     """Returns whether the current request URL is canonical."""
     self.ensure_one()
     # Compare OrderedMultiDict because the order is important, there must be
     # only one canonical and not params permutations.
     params = request.httprequest.args
     canonical_params = canonical_params or OrderedMultiDict()
     if params != canonical_params:
         return False
     # Compare URL at the first rerouting iteration (if available) because
     # it's the one with the language in the path.
     # It is important to also test the domain of the current URL.
     current_url = request.httprequest.url_root[:-1] + (
         hasattr(request, 'rerouting') and request.rerouting[0]
         or request.httprequest.path)
     canonical_url = self._get_canonical_url_localized(
         lang=request.lang, canonical_params=None)
     # A request path with quotable characters (such as ",") is never
     # canonical because request.httprequest.base_url is always unquoted,
     # and canonical url is always quoted, so it is never possible to tell
     # if the current URL is indeed canonical or not.
     return current_url == canonical_url
Example #14
    def export_room(self, user):
        loc = Location.find_first(name=self._location)
        if loc is None:
            return

        # Retrieve rooms
        rooms_data = list(
            Room.get_with_data(
                'vc_equipment',
                'non_vc_equipment',
                filters=[Room.id.in_(self._ids), Room.location_id == loc.id]))

        # Retrieve reservations
        reservations = None
        if self._detail == 'reservations':
            reservations = OrderedMultiDict(
                _export_reservations(self, True, False, [
                    Reservation.room_id.in_(x['room'].id for x in rooms_data)
                ]))

        for result in rooms_data:
            yield _serializable_room(result, reservations)
Example #15
    def __init__(self):
        """
        Creates a new request.

        The request is probably useless right now, but the HTTP parser will then go on to set the right attributes on
        it.
        """
        # Empty values.
        self.method = ""

        # This differs from path/query because it's the full `/a/b/?c=d`.
        # This is then urlsplit into a path and query string in _parse_path.
        self.full_path = b""

        self.path = ""
        self.query = ""
        self.version = ""

        # Empty body, as this isn't known until it's passed in.
        self.body = ""

        self.cookies = cookies.SimpleCookie()

        # We use a Headers object here as it serves our purposes the best.
        self.headers = Headers()

        # Args, values, and forms are OrderedMultiDicts.
        # So are files.
        self.args = OrderedMultiDict()
        self._form = OrderedMultiDict()
        self.values = OrderedMultiDict()
        self.files = OrderedMultiDict()

        # Protocol-specific data.
        self.ip = ""
        self.port = 0

        # Extra values, for hooks.
        self.extra = {}

        self.should_keep_alive = False
Example #16
    def test_upload_multiple_files_should_be_successful(self, mock_logger):
        username = "******"
        message_key = "MAP_UPLOAD_SUCCESS"

        # Upload 7 files
        filenames = [
            "map_1500.dat", "map_2000.dat", "map_1501.dat", "map_1502.dat",
            "map_1001.dat", "map_1503.dat", "map_1504.dat"
        ]

        for filename in filenames:
            self.copy_test_data_file("existing_unlocked.dat", self.uploads_dir,
                                     filename)

        original_files = self.load_test_data_files(filenames)

        data = OrderedMultiDict()
        data.add("userName", username)

        for filename in original_files:
            data.add("map", (BytesIO(original_files[filename]), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        logger_calls = []

        for filename in original_files:
            # Verify that the new map files were uploaded and locked
            expected_nbt_file = self.load_test_data_nbt_file(filename)
            uploaded_nbt_file = self.load_uploaded_nbt_file(filename)
            self.verify_matching_nbt_values(expected_nbt_file,
                                            uploaded_nbt_file)

            assert get_nbt_map_value(uploaded_nbt_file, "locked") == 1

            self.verify_flash_message_by_key(message_key, response.data,
                                             filename)
            logger_calls.append(
                call(self.get_log_message(message_key), filename, username))

        mock_logger.info.assert_has_calls(logger_calls, any_order=True)
Example #17
    def test_upload_multiple_files_should_be_successful(self, mock_logger):
        username = "******"
        message_key = "SCHEMATIC_UPLOAD_SUCCESS"

        # Upload 5 files
        filenames = [
            "mrt_v5_final_elevated_centre_station.schem",
            "mrt_v5_final_elevated_side_station.schematic",
            "mrt_v5_final_elevated_single_track.schematic",
            "mrt_v5_final_elevated_double_track.schematic",
            "mrt_v5_final_elevated_double_curve.schematic"
        ]

        original_files = self.load_test_data_files(filenames)

        data = OrderedMultiDict()
        data.add("userName", username)

        for filename in original_files:
            data.add("schematic",
                     (BytesIO(original_files[filename]), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        logger_calls = []

        for filename in original_files:
            uploaded_filename = self.uploaded_filename(username, filename)

            self.verify_file_content(self.uploads_dir, uploaded_filename,
                                     original_files[filename])

            self.verify_flash_message_by_key(message_key, response.data,
                                             uploaded_filename)
            logger_calls.append(
                call(self.get_log_message(message_key), uploaded_filename,
                     username))

        mock_logger.info.assert_has_calls(logger_calls, any_order=True)
Example #18
    def test_upload_with_invalid_file_should_fail(self, mock_logger, filename,
                                                  message_key):
        username = "******"
        uploaded_filename = self.uploaded_filename(username, filename)
        original_file_content = self.load_test_data_file(filename)

        data = OrderedMultiDict()
        data.add("userName", username)
        data.add("schematic", (BytesIO(original_file_content), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        self.verify_schematic_uploads_dir_is_empty()

        self.verify_flash_message_by_key(message_key, response.data,
                                         uploaded_filename)
        mock_logger.warn.assert_called_with(self.get_log_message(message_key),
                                            uploaded_filename, username)
Example #19
    def test_upload_with_too_many_files_should_fail(self, mock_logger):
        username = "******"
        message_key = "SCHEMATIC_UPLOAD_TOO_MANY_FILES"

        # Upload 12 files, over the limit of 10.
        filenames = [
            "mrt_v5_final_elevated_centre_station.schematic",
            "mrt_v5_final_elevated_side_station.schematic",
            "mrt_v5_final_elevated_single_track.schematic",
            "mrt_v5_final_elevated_double_track.schematic",
            "mrt_v5_final_elevated_double_curve.schematic",
            "mrt_v5_final_ground_centre_station.schematic",
            "mrt_v5_final_ground_side_station.schematic",
            "mrt_v5_final_ground_single_track.schematic",
            "mrt_v5_final_ground_double_track.schematic",
            "mrt_v5_final_ground_double_curve.schematic",
            "mrt_v5_final_subground_centre_station.schematic",
            "mrt_v5_final_subground_side_station.schematic"
        ]

        original_files = self.load_test_data_files(filenames)

        data = OrderedMultiDict()
        data.add("userName", username)

        for filename in original_files:
            data.add("schematic",
                     (BytesIO(original_files[filename]), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        self.verify_schematic_uploads_dir_is_empty()

        self.verify_flash_message_by_key(message_key, response.data)
        mock_logger.warn.assert_called_with(self.get_log_message(message_key),
                                            username)
Example #20
    def test_upload_with_too_many_files_should_fail(self, mock_logger):
        username = "******"
        existing_filename = "existing_unlocked.dat"
        message_key = "MAP_UPLOAD_TOO_MANY_FILES"

        # Upload 11 files, over the limit of 10.
        filenames = [
            "map_1001.dat", "map_1500.dat", "map_1501.dat", "map_1502.dat",
            "map_1503.dat", "map_1504.dat", "map_1505.dat", "map_1506.dat",
            "map_1507.dat", "map_1508.dat", "map_2000.dat"
        ]

        for filename in filenames:
            self.copy_test_data_file(existing_filename, self.uploads_dir,
                                     filename)

        existing_file_content = self.load_test_data_file(existing_filename)

        upload_files = self.load_test_data_files(filenames)

        data = OrderedMultiDict()
        data.add("userName", username)

        for filename in upload_files:
            data.add("map", (BytesIO(upload_files[filename]), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        # Verify that none of the existing map files were overwritten
        for filename in filenames:
            self.verify_file_content(self.uploads_dir, filename,
                                     existing_file_content)

        self.verify_flash_message_by_key(message_key, response.data)
        mock_logger.warn.assert_called_with(self.get_log_message(message_key),
                                            username)
Example #21
    def test_upload_with_invalid_filename_should_fail(self, mock_logger,
                                                      filename):
        username = "******"
        message_key = "MAP_UPLOAD_FILENAME_INVALID"

        upload_file_content = self.load_test_data_file(filename)

        data = OrderedMultiDict()
        data.add("userName", username)
        data.add("map", (BytesIO(upload_file_content), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        # Verify that the new map file was NOT uploaded
        uploaded_file_path = os.path.join(self.uploads_dir, filename)
        assert not os.path.isfile(uploaded_file_path)

        self.verify_flash_message_by_key(message_key, response.data, filename)
        mock_logger.warn.assert_called_with(self.get_log_message(message_key),
                                            filename, username)
Example #22
    def test_upload_single_file_should_be_successful(self, mock_logger,
                                                     filename):
        username = "******"
        uploaded_filename = self.uploaded_filename(username, filename)
        original_file_content = self.load_test_data_file(filename)
        message_key = "SCHEMATIC_UPLOAD_SUCCESS"

        data = OrderedMultiDict()
        data.add("userName", username)
        data.add("schematic", (BytesIO(original_file_content), filename))

        response = self.perform_upload(data)

        assert response.status_code == 200
        assert response.mimetype == "text/html"

        self.verify_file_content(self.uploads_dir, uploaded_filename,
                                 original_file_content)

        self.verify_flash_message_by_key(message_key, response.data,
                                         uploaded_filename)
        mock_logger.info.assert_called_with(self.get_log_message(message_key),
                                            uploaded_filename, username)
Example #23
def test_multidict_encode_decode_text(t1, t2):
    d = OrderedMultiDict()
    d.add(t1, t2)
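    # encoding and then decoding should round-trip arbitrary text keys and values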
    assert d == urls.url_decode(urls.url_encode(d))
Example #24
    def events(self, page=1, **searches):
        Event = request.env['event.event']
        SudoEventType = request.env['event.type'].sudo()

        searches.setdefault('search', '')
        searches.setdefault('date', 'upcoming')
        searches.setdefault('tags', '')
        searches.setdefault('type', 'all')
        searches.setdefault('country', 'all')

        website = request.website

        step = 12  # Number of events per page

        options = {
            'displayDescription': False,
            'displayDetail': False,
            'displayExtraDetail': False,
            'displayExtraLink': False,
            'displayImage': False,
            'allowFuzzy': not searches.get('noFuzzy'),
            'date': searches.get('date'),
            'tags': searches.get('tags'),
            'type': searches.get('type'),
            'country': searches.get('country'),
        }
        order = 'date_begin'
        if searches.get('date', 'upcoming') == 'old':
            order = 'date_begin desc'
        order = 'is_published desc, ' + order
        search = searches.get('search')
        event_count, details, fuzzy_search_term = website._search_with_fuzzy(
            "events", search, limit=page * step, order=order, options=options)
        event_details = details[0]
        events = event_details.get('results', Event)
        events = events[(page - 1) * step:page * step]

        # count by domains without self search
        domain_search = [
            ('name', 'ilike', fuzzy_search_term or searches['search'])
        ] if searches['search'] else []

        no_date_domain = event_details['no_date_domain']
        dates = event_details['dates']
        for date in dates:
            if date[0] not in ['all', 'old']:
                date[3] = Event.search_count(
                    expression.AND(no_date_domain) + domain_search + date[2])

        no_country_domain = event_details['no_country_domain']
        countries = Event.read_group(expression.AND(no_country_domain) +
                                     domain_search, ["id", "country_id"],
                                     groupby="country_id",
                                     orderby="country_id")
        countries.insert(
            0, {
                'country_id_count':
                sum([
                    int(country['country_id_count']) for country in countries
                ]),
                'country_id': ("all", _("All Countries"))
            })

        search_tags = event_details['search_tags']
        current_date = event_details['current_date']
        current_type = None
        current_country = None

        if searches["type"] != 'all':
            current_type = SudoEventType.browse(int(searches['type']))

        if searches["country"] != 'all' and searches["country"] != 'online':
            current_country = request.env['res.country'].browse(
                int(searches['country']))

        pager = website.pager(url="/event",
                              url_args=searches,
                              total=event_count,
                              page=page,
                              step=step,
                              scope=5)

        keep = QueryURL(
            '/event', **{
                key: value
                for key, value in searches.items() if (key == 'search' or (
                    value != 'upcoming' if key == 'date' else value != 'all'))
            })

        searches['search'] = fuzzy_search_term or search

        values = {
            'current_date':
            current_date,
            'current_country':
            current_country,
            'current_type':
            current_type,
            'event_ids':
            events,  # event_ids used in website_event_track so we keep name as it is
            'dates':
            dates,
            'categories':
            request.env['event.tag.category'].search([('is_published', '=',
                                                       True)]),
            'countries':
            countries,
            'pager':
            pager,
            'searches':
            searches,
            'search_tags':
            search_tags,
            'keep':
            keep,
            'search_count':
            event_count,
            'original_search':
            fuzzy_search_term and search,
        }

        if searches['date'] == 'old':
            # the only way to display this content is to set date=old so it must be canonical
            values['canonical_params'] = OrderedMultiDict([('date', 'old')])

        return request.render("website_event.index", values)
Example #25
class Request(object):
    """
    A Request object.

    This should not be manually created. Instead, it is automatically provided by Kyokai.

    If you must create one, use :meth:`from_data` or :meth:`parse`.

    :ivar method: The HTTP method (GET, POST, PUT, etc)
    :ivar path: The full path of the request (``/api/v1/whatever``)
    :ivar headers: A :class:`IOrderedDict` representing the headers of the request.
    :ivar query: The raw query string (``a=b&c=d``)
    :ivar body: The raw body of the request.

    :ivar cookies: A :class:`cookies.SimpleCookie` containing the cookies of the request.

    :ivar args: The arguments from the query string, parsed out.
    :ivar form: The form data for the request. If the request was JSON, this is automatically parsed out.
    :ivar values: The arguments and the form combined.

    :ivar source: The source IP of the request.
    """
    def __init__(self):
        """
        Creates a new request.

        The request is probably useless right now, but the HTTP parser will then go on to set the right attributes on
        it.
        """
        # Empty values.
        self.method = ""

        # This differs from path/query because it's the full `/a/b/?c=d`.
        # This is then urlsplit into a path and query string in _parse_path.
        self.full_path = b""

        self.path = ""
        self.query = ""
        self.version = ""

        # Empty body, as this isn't known until it's passed in.
        self.body = ""

        self.cookies = cookies.SimpleCookie()

        # We use a Headers object here as it serves our purposes the best.
        self.headers = Headers()

        # Args, values, and forms are OrderedMultiDicts.
        # So are files.
        self.args = OrderedMultiDict()
        self._form = OrderedMultiDict()
        self.values = OrderedMultiDict()
        self.files = OrderedMultiDict()

        # Protocol-specific data.
        self.ip = ""
        self.port = 0

        # Extra values, for hooks.
        self.extra = {}

        self.should_keep_alive = False

    @property
    def form(self) -> dict:
        """
        Returns the form data for the specified request.
        JSON forms are lazy loaded. This means that parsing is done in the first call to `.form`, rather than when
        the request is created.
        """
        if self._form:
            return self._form
        # Parse JSON, otherwise.
        if self.headers.get("Content-Type") == "application/json":
            self._form = json.loads(self.body)
            self.values.update(self._form if self._form else {})
        return self._form

    def _parse_path(self):
        """
        urlsplits the full path.
        """
        split = uparse.urlsplit(self.full_path.decode())
        self.path = split.path
        self.query = split.query

    def _parse_query(self):
        """
        Parses the query string, and updates `args` with it as appropriate.
        """
        new_args = uparse.parse_qs(self.query)
        # Unpack the urlparsed arguments.
        for name, value in new_args.items():
            if len(value) == 1:
                self.args[name] = value[0]
            elif len(value) == 0:
                self.args[name] = None
            else:
                self.args[name] = value

    def _parse_body(self):
        """
        Parses the body data.
        """
        if self.headers.get("Content-Type") != "application/json":
            # Parse the form data out.
            f_parser = formparser.FormDataParser()

            # Wrap the body in a BytesIO.
            body = BytesIO(self.body.encode())

            # The headers can't be directly passed into Werkzeug.
            # Instead, we have to get the custom content type, then pass in some fake WSGI options.
            mimetype, c_t_args = parse_options_header(
                self.headers.get("Content-Type"))

            if mimetype:
                # We have a valid mimetype.
                # This is good!
                # Now parse the body.

                # Construct a fake WSGI environment.
                env = {
                    "Content-Type": self.headers.get("Content-Type"),
                    "Content-Length": self.headers.get("Content-Length")
                }

                # Take the boundary out of the Content-Type, if applicable.
                boundary = c_t_args.get("boundary")
                if boundary is not None:
                    env["boundary"] = boundary

                # Get a good content length.
                content_length = self.headers.get("Content-Length")
                try:
                    content_length = int(content_length)
                except ValueError:
                    content_length = len(self.body)
                except TypeError:
                    # NoneType...
                    raise HTTPException(411)

                # Then, the form body itself is parsed.

                data = f_parser.parse(body,
                                      mimetype,
                                      content_length,
                                      options=env)

                # Extract the new data from the form parser.
                self._form.update(data[1])
                self.files.update(data[2])

    def parse_all(self):
        """
        Called when all data is parsed.

        This tells the request to re-parse everything based off of the raw data.

        This is an internal method.

        .. versionadded:: 1.9
        """
        # Call _parse_path to parse the path.
        self._parse_path()
        # Call _parse_query to parse the query string.
        self._parse_query()
        # Call _parse_body to parse the body.
        self._parse_body()

        # Load cookies.
        cookie_header = self.headers.get_all("Cookie")
        for c in cookie_header:
            self.cookies.load(c)
Example #26
    def events(self, page=1, **searches):
        Event = request.env['event.event']
        EventType = request.env['event.type']

        searches.setdefault('search', '')
        searches.setdefault('date', 'all')
        searches.setdefault('type', 'all')
        searches.setdefault('country', 'all')

        website = request.website
        today = datetime.today()

        def sdn(date):
            return fields.Datetime.to_string(
                date.replace(hour=23, minute=59, second=59))

        def sd(date):
            return fields.Datetime.to_string(date)

        def get_month_filter_domain(filter_name, months_delta):
            first_day_of_the_month = today.replace(day=1)
            filter_string = _('This month') if months_delta == 0 \
                else format_date(request.env, value=today + relativedelta(months=months_delta),
                                 date_format='LLLL', lang_code=get_lang(request.env).code).capitalize()
            return [
                filter_name, filter_string,
                [("date_end", ">=",
                  sd(first_day_of_the_month +
                     relativedelta(months=months_delta))),
                 ("date_begin", "<",
                  sd(first_day_of_the_month +
                     relativedelta(months=months_delta + 1)))], 0
            ]

        dates = [
            ['all',
             _('Upcoming Events'), [("date_end", ">", sd(today))], 0],
            [
                'today',
                _('Today'),
                [("date_end", ">", sd(today)),
                 ("date_begin", "<", sdn(today))], 0
            ],
            get_month_filter_domain('month', 0),
            get_month_filter_domain('nextmonth1', 1),
            get_month_filter_domain('nextmonth2', 2),
            ['old', _('Past Events'), [("date_end", "<", sd(today))], 0],
        ]

        # search domains
        domain_search = {'website_specific': website.website_domain()}

        if searches['search']:
            domain_search['search'] = [('name', 'ilike', searches['search'])]

        current_date = None
        current_type = None
        current_country = None
        for date in dates:
            if searches["date"] == date[0]:
                domain_search["date"] = date[2]
                if date[0] != 'all':
                    current_date = date[1]

        if searches["type"] != 'all':
            current_type = EventType.browse(int(searches['type']))
            domain_search["type"] = [("event_type_id", "=",
                                      int(searches["type"]))]

        if searches["country"] != 'all' and searches["country"] != 'online':
            current_country = request.env['res.country'].browse(
                int(searches['country']))
            domain_search["country"] = [
                '|', ("country_id", "=", int(searches["country"])),
                ("country_id", "=", False)
            ]
        elif searches["country"] == 'online':
            domain_search["country"] = [("country_id", "=", False)]

        def dom_without(without):
            domain = []
            for key, search in domain_search.items():
                if key != without:
                    domain += search
            return domain

        # count by domains without self search
        for date in dates:
            if date[0] != 'old':
                date[3] = Event.search_count(dom_without('date') + date[2])

        domain = dom_without('type')
        types = Event.read_group(domain, ["id", "event_type_id"],
                                 groupby=["event_type_id"],
                                 orderby="event_type_id")
        types.insert(
            0, {
                'event_type_id_count':
                sum([int(type['event_type_id_count']) for type in types]),
                'event_type_id': ("all", _("All Categories"))
            })

        domain = dom_without('country')
        countries = Event.read_group(domain, ["id", "country_id"],
                                     groupby="country_id",
                                     orderby="country_id")
        countries.insert(
            0, {
                'country_id_count':
                sum([
                    int(country['country_id_count']) for country in countries
                ]),
                'country_id': ("all", _("All Countries"))
            })

        step = 12  # Number of events per page
        event_count = Event.search_count(dom_without("none"))
        pager = website.pager(url="/event",
                              url_args=searches,
                              total=event_count,
                              page=page,
                              step=step,
                              scope=5)

        order = 'date_begin'
        if searches.get('date', 'all') == 'old':
            order = 'date_begin desc'
        if searches[
                "country"] != 'all':  # if we are looking for a specific country
            order = 'is_online, ' + order  # show physical events first
        order = 'is_published desc, ' + order
        events = Event.search(dom_without("none"),
                              limit=step,
                              offset=pager['offset'],
                              order=order)

        keep = QueryURL(
            '/event', **{
                key: value
                for key, value in searches.items()
                if (key == 'search' or value != 'all')
            })

        values = {
            'current_date': current_date,
            'current_country': current_country,
            'current_type': current_type,
            'event_ids':
            events,  # event_ids used in website_event_track so we keep name as it is
            'dates': dates,
            'types': types,
            'countries': countries,
            'pager': pager,
            'searches': searches,
            'keep': keep,
        }

        if searches['date'] == 'old':
            # the only way to display this content is to set date=old so it must be canonical
            values['canonical_params'] = OrderedMultiDict([('date', 'old')])

        return request.render("website_event.index", values)
Example #27
 def test_multidict_encoding(self):
     d = OrderedMultiDict()
     d.add('2013-10-10T23:26:05.657975+0000', '2013-10-10T23:26:05.657975+0000')
     self.assert_equal(urls.url_encode(d), '2013-10-10T23%3A26%3A05.657975%2B0000=2013-10-10T23%3A26%3A05.657975%2B0000')
Example #28
    def get_with_data(*args, **kwargs):
        filters = kwargs.pop('filters', None)
        limit = kwargs.pop('limit', None)
        offset = kwargs.pop('offset', 0)
        order = kwargs.pop('order', Reservation.start_dt)
        limit_per_room = kwargs.pop('limit_per_room', False)
        occurs_on = kwargs.pop('occurs_on')
        if kwargs:
            raise ValueError('Unexpected kwargs: {}'.format(kwargs))

        query = Reservation.query.options(joinedload(Reservation.room))
        if filters:
            query = query.filter(*filters)
        if occurs_on:
            query = query.filter(
                Reservation.id.in_(
                    db.session.query(
                        ReservationOccurrence.reservation_id).filter(
                            ReservationOccurrence.date.in_(occurs_on),
                            ReservationOccurrence.is_valid)))
        if limit_per_room and (limit or offset):
            query = limit_groups(query, Reservation, Reservation.room_id,
                                 order, limit, offset)

        query = query.order_by(order, Reservation.created_dt)

        if not limit_per_room:
            if limit:
                query = query.limit(limit)
            if offset:
                query = query.offset(offset)

        result = OrderedDict((r.id, {'reservation': r}) for r in query)

        if 'vc_equipment' in args:
            vc_id_subquery = db.session.query(EquipmentType.id) \
                .correlate(Reservation) \
                .filter_by(name='Video conference') \
                .join(RoomEquipmentAssociation) \
                .filter(RoomEquipmentAssociation.c.room_id == Reservation.room_id) \
                .as_scalar()

            # noinspection PyTypeChecker
            vc_equipment_data = dict(
                db.session.query(
                    Reservation.id,
                    static_array.array_agg(EquipmentType.name)).join(
                        ReservationEquipmentAssociation, EquipmentType).filter(
                            Reservation.id.in_(result.iterkeys())).filter(
                                EquipmentType.parent_id ==
                                vc_id_subquery).group_by(Reservation.id))

            for id_, data in result.iteritems():
                data['vc_equipment'] = vc_equipment_data.get(id_, ())

        if 'occurrences' in args:
            occurrence_data = OrderedMultiDict(
                db.session.query(ReservationOccurrence.reservation_id,
                                 ReservationOccurrence).filter(
                                     ReservationOccurrence.reservation_id.in_(
                                         result.iterkeys())).order_by(
                                             ReservationOccurrence.start_dt))
            for id_, data in result.iteritems():
                data['occurrences'] = occurrence_data.getlist(id_)

        return result.values()
Example #29
def _filter_fields(fields, predicate):
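    # items(multi=True) yields every (key, value) pair, so duplicate keys and their order survive the filter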
    return OrderedMultiDict([
        (key, value)
        for key, value in fields.items(multi=True)
        if predicate(value)
    ])
Example #30
class Form(object):
    """Representation of an HTML form."""

    def __init__(self, parsed):
        parsed = helpers.ensure_soup(parsed)
        if parsed.name != 'form':
            parsed = parsed.find('form')
        self.parsed = parsed
        self.action = self.parsed.get('action')
        self.method = self.parsed.get('method', 'get')
        self.fields = OrderedMultiDict()
        for field in _parse_fields(self.parsed):
            self.add_field(field)

    def add_field(self, field):
        """Add a field.

        :param field: Field to add
        :raise: ValueError if `field` is not an instance of `BaseField`.

        """
        if not isinstance(field, fields.BaseField):
            raise ValueError('Argument "field" must be an instance of '
                             'BaseField')
        self.fields.add(field.name, field)

    @property
    def submit_fields(self):
        return _filter_fields(
            self.fields,
            lambda field: isinstance(field, fields.Submit)
        )

    @encode_if_py2
    def __repr__(self):
        state = u', '.join(
            [
                u'{0}={1}'.format(name, field.value)
                for name, field in self.fields.items(multi=True)
            ]
        )
        if state:
            return u'<RoboForm {0}>'.format(state)
        return u'<RoboForm>'

    def keys(self):
        return self.fields.keys()

    def __getitem__(self, item):
        return self.fields[item]

    def __setitem__(self, key, value):
        self.fields[key].value = value

    def serialize(self, submit=None):
        """Serialize each form field to a Payload container.

        :param Submit submit: Optional `Submit` to click, if form includes
            multiple submits
        :return: Payload instance

        """
        include_fields = prepare_fields(self.fields, self.submit_fields, submit)
        return Payload.from_fields(include_fields)
Example #31
 def test_get_data_with_assurance(self):
     assert self.question(assuranceApproach='2answers-type1').get_data(
         OrderedMultiDict([('example', 'value1'), ('example', 'value2'), ('example--assurance', 'assurance value')])
     ) == {'example': {'value': ['value1', 'value2'], 'assurance': 'assurance value'}}
Example #32
 def test_get_data(self):
     assert self.question().get_data(
         OrderedMultiDict([('example', 'value1'), ('example', 'value2')])
     ) == {'example': ['value1', 'value2']}
Example #33
    def events(self, page=1, **searches):
        Event = request.env['event.event']
        EventType = request.env['event.type']

        searches.setdefault('date', 'all')
        searches.setdefault('type', 'all')
        searches.setdefault('country', 'all')

        website = request.website

        def sdn(date):
            return fields.Datetime.to_string(
                date.replace(hour=23, minute=59, second=59))

        def sd(date):
            return fields.Datetime.to_string(date)

        today = datetime.today()
        dates = [
            ['all', _('Next Events'), [("date_end", ">", sd(today))], 0],
            [
                'today',
                _('Today'),
                [("date_end", ">", sd(today)),
                 ("date_begin", "<", sdn(today))], 0
            ],
            [
                'week',
                _('This Week'),
                [("date_end", ">=",
                  sd(today + relativedelta(days=-today.weekday()))),
                 ("date_begin", "<",
                  sdn(today + relativedelta(days=6 - today.weekday())))], 0
            ],
            [
                'nextweek',
                _('Next Week'),
                [("date_end", ">=",
                  sd(today + relativedelta(days=7 - today.weekday()))),
                 ("date_begin", "<",
                  sdn(today + relativedelta(days=13 - today.weekday())))], 0
            ],
            [
                'month',
                _('This month'),
                [("date_end", ">=", sd(today.replace(day=1))),
                 ("date_begin", "<",
                  (today.replace(day=1) +
                   relativedelta(months=1)).strftime('%Y-%m-%d 00:00:00'))], 0
            ],
            [
                'nextmonth',
                _('Next month'),
                [("date_end", ">=",
                  sd(today.replace(day=1) + relativedelta(months=1))),
                 ("date_begin", "<",
                  (today.replace(day=1) +
                   relativedelta(months=2)).strftime('%Y-%m-%d 00:00:00'))], 0
            ],
            [
                'old',
                _('Past Events'),
                [("date_end", "<", today.strftime('%Y-%m-%d 00:00:00'))], 0
            ],
        ]

        # search domains
        domain_search = {'website_specific': website.website_domain()}
        current_date = None
        current_type = None
        current_country = None
        for date in dates:
            if searches["date"] == date[0]:
                domain_search["date"] = date[2]
                if date[0] != 'all':
                    current_date = date[1]
        if searches["type"] != 'all':
            current_type = EventType.browse(int(searches['type']))
            domain_search["type"] = [("event_type_id", "=",
                                      int(searches["type"]))]

        if searches["country"] != 'all' and searches["country"] != 'online':
            current_country = request.env['res.country'].browse(
                int(searches['country']))
            domain_search["country"] = [
                '|', ("country_id", "=", int(searches["country"])),
                ("country_id", "=", False)
            ]
        elif searches["country"] == 'online':
            domain_search["country"] = [("country_id", "=", False)]

        def dom_without(without):
            domain = [('state', "in", ['draft', 'confirm', 'done'])]
            for key, search in domain_search.items():
                if key != without:
                    domain += search
            return domain

        # count by domains without self search
        for date in dates:
            if date[0] != 'old':
                date[3] = Event.search_count(dom_without('date') + date[2])

        domain = dom_without('type')
        types = Event.read_group(domain, ["id", "event_type_id"],
                                 groupby=["event_type_id"],
                                 orderby="event_type_id")
        types.insert(
            0, {
                'event_type_id_count':
                sum([int(type['event_type_id_count']) for type in types]),
                'event_type_id': ("all", _("All Categories"))
            })

        domain = dom_without('country')
        countries = Event.read_group(domain, ["id", "country_id"],
                                     groupby="country_id",
                                     orderby="country_id")
        countries.insert(
            0, {
                'country_id_count':
                sum([
                    int(country['country_id_count']) for country in countries
                ]),
                'country_id': ("all", _("All Countries"))
            })

        step = 10  # Number of events per page
        event_count = Event.search_count(dom_without("none"))
        pager = website.pager(url="/event",
                              url_args={
                                  'date': searches.get('date'),
                                  'type': searches.get('type'),
                                  'country': searches.get('country')
                              },
                              total=event_count,
                              page=page,
                              step=step,
                              scope=5)

        order = 'date_begin'
        if searches.get('date', 'all') == 'old':
            order = 'date_begin desc'
        if searches[
                "country"] != 'all':  # if we are looking for a specific country
            order = 'is_online, ' + order  # show physical events first
        order = 'is_published desc, ' + order
        events = Event.search(dom_without("none"),
                              limit=step,
                              offset=pager['offset'],
                              order=order)

        values = {
            'current_date': current_date,
            'current_country': current_country,
            'current_type': current_type,
            'event_ids':
            events,  # event_ids used in website_event_track so we keep name as it is
            'dates': dates,
            'types': types,
            'countries': countries,
            'pager': pager,
            'searches': searches,
            'search_path': "?%s" % werkzeug.url_encode(searches),
        }

        if searches['date'] == 'old':
            # the only way to display this content is to set date=old so it must be canonical
            values['canonical_params'] = OrderedMultiDict([('date', 'old')])

        return request.render("website_event.index", values)
Example #34
def test_ordered_multidict():
    """Test the OrderedMultiDict"""
    d = OrderedMultiDict()
    assert not d
    d.add("foo", "bar")
    assert len(d) == 1
    d.add("foo", "baz")
    assert len(d) == 1
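    # items() yields one (key, first value) pair per key; items(multi=True) yields every pair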
    assert d.items() == [("foo", "bar")]
    assert list(d) == ["foo"]
    assert d.items(multi=True) == [("foo", "bar"), ("foo", "baz")]
    del d["foo"]
    assert not d
    assert len(d) == 0
    assert list(d) == []

    d.update([("foo", 1), ("foo", 2), ("bar", 42)])
    d.add("foo", 3)
    assert d.getlist("foo") == [1, 2, 3]
    assert d.getlist("bar") == [42]
    assert d.items() == [("foo", 1), ("bar", 42)]
    assert d.keys() == list(d) == list(d.iterkeys()) == ["foo", "bar"]
    assert d.items(multi=True) == [("foo", 1), ("foo", 2), ("bar", 42), ("foo", 3)]
    assert len(d) == 2

    assert d.pop("foo") == 1
    assert d.pop("blafasel", None) is None
    assert d.pop("blafasel", 42) == 42
    assert len(d) == 1
    assert d.poplist("bar") == [42]
    assert not d

    d.get("missingkey") is None

    d.add("foo", 42)
    d.add("foo", 23)
    d.add("bar", 2)
    d.add("foo", 42)
    assert d == MultiDict(d)
    id = ImmutableOrderedMultiDict(d)
    assert d == id
    d.add("foo", 2)
    assert d != id

    d.update({"blah": [1, 2, 3]})
    assert d["blah"] == 1
    assert d.getlist("blah") == [1, 2, 3]

    # setlist works
    d = OrderedMultiDict()
    d["foo"] = 42
    d.setlist("foo", [1, 2])
    assert d.getlist("foo") == [1, 2]

    assert_raises(OrderedMultiDict.KeyError, d.pop, "missing")
    assert_raises(OrderedMultiDict.KeyError, d.__getitem__, "missing")

    # popping
    d = OrderedMultiDict()
    d.add("foo", 23)
    d.add("foo", 42)
    d.add("foo", 1)
    assert d.popitem() == ("foo", 23)
    assert_raises(OrderedMultiDict.KeyError, d.popitem)
    assert not d

    d.add("foo", 23)
    d.add("foo", 42)
    d.add("foo", 1)
    assert d.popitemlist() == ("foo", [23, 42, 1])
    assert_raises(OrderedMultiDict.KeyError, d.popitemlist)
Example #35
class StatementHistory(object):

    """Keeps a history of SQL statements with execution time and offers some pretty printing options.
    No precautions for thread safety.
    """

    def __init__(self):
        self._reset()

    def _reset(self):
        self._statements = OrderedMultiDict()
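        # keyed by SQL statement; repeated executions add more (timestamp, duration, notices) values under the same key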
        self.last_statement = None
        self.last_duration = None
        self.last_timestamp = None

    def append(self, stmt, timestamp, duration, notices):
        self.last_statement = stmt
        self.last_timestamp = timestamp
        self.last_duration = duration
        self.last_notices = notices
        self._statements.add(stmt, (timestamp, duration, notices))

    def clear(self):
        # clear() does not work on OrderedMultiDict, bug in werkzeug?
        self._reset()

    @property
    def statements(self):
        return self._statements.keys()

    @property
    def statements_with_all_infos(self):
        return iteritems(self._statements)
    
    @property
    def statements_with_time(self):
        return ((k, v[0]) for k, v in iteritems(self._statements))

    def format_statement(self, stmt, highlight=True, time=0, duration=0, pygments_style=DEFAULT_PYGMENTS_STYLE, formatter_cls=None):
        show_time = time and duration
        highlight_format_stmt = _make_statement_formatter(show_time, highlight, pygments_style, formatter_cls)
        return highlight_format_stmt(stmt)

    def print_last_statement(self, show_time=True, highlight=True, pygments_style=DEFAULT_PYGMENTS_STYLE):
        if self.last_statement is None:
            print("history is empty")
            return

        highlight_format_stmt = _make_statement_formatter(show_time, highlight, pygments_style)
        print()
        print(highlight_format_stmt(self.last_statement, self.last_timestamp, self.last_duration))

    def print_statements(self, show_time=True, highlight=True, pygments_style=DEFAULT_PYGMENTS_STYLE):
        if self.last_statement is None:
            print("history is empty")
            return

        highlight_format_stmt = _make_statement_formatter(show_time, highlight, pygments_style)
        print()

        # each stored value is a (timestamp, duration, notices) tuple
        for stmt, (timestamp, duration, notices) in self._statements.items(multi=True):
            print(highlight_format_stmt(stmt, timestamp, duration))
Example #36
 def __init__(self):
     self.data = OrderedMultiDict()
     self.options = collections.defaultdict(OrderedMultiDict)
Example #37
class Form(object):
    """Representation of an HTML form."""

    def __init__(self, parsed):
        parsed = helpers.ensure_soup(parsed)
        if parsed.name != 'form':
            parsed = parsed.find('form')
        self.parsed = parsed
        self.action = self.parsed.get('action')
        self.method = self.parsed.get('method', 'get')
        self.fields = OrderedMultiDict()
        for field in _parse_fields(self.parsed):
            self.add_field(field)

    def add_field(self, field):
        """Add a field.

        :param field: Field to add
        :raise: ValueError if `field` is not an instance of `BaseField`.

        """
        if not isinstance(field, fields.BaseField):
            raise ValueError('Argument "field" must be an instance of '
                             'BaseField')
        if (isinstance(field, fields.Checkbox) or isinstance(field, fields.Radio)) \
                and field.name in self.fields:
            self.fields[field.name].options.extend(field.options)
        else:
            self.fields.add(field.name, field)

    @property
    def submit_fields(self):
        return _filter_fields(
            self.fields,
            lambda field: isinstance(field, fields.Submit)
        )

    @encode_if_py2
    def __repr__(self):
        state = u', '.join(
            [
                u'{0}={1}'.format(name, field.value)
                for name, field in self.fields.items(multi=True)
            ]
        )
        if state:
            return u'<RoboForm {0}>'.format(state)
        return u'<RoboForm>'

    def keys(self):
        return self.fields.keys()

    def __getitem__(self, item):
        return self.fields[item]

    def __setitem__(self, key, value):
        self.fields[key].value = value

    def serialize(self, submit=None):
        """Serialize each form field to a Payload container.

        :param Submit submit: Optional `Submit` to click, if form includes
            multiple submits
        :return: Payload instance

        """
        include_fields = prepare_fields(
            self.fields, self.submit_fields, submit)
        return Payload.from_fields(include_fields)
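
Form keeps its fields in an OrderedMultiDict precisely so that several inputs can share one name (repeated checkboxes, multi-selects) while document order is preserved for serialization. A small standalone sketch of that storage pattern, with plain strings standing in for field objects:

from werkzeug.datastructures import OrderedMultiDict

fields = OrderedMultiDict()
fields.add("title", "My post")
fields.add("tag", "python")      # two inputs share the name "tag"
fields.add("tag", "werkzeug")

# items(multi=True) keeps every (name, value) pair in insertion order,
# which is what a serialized form submission needs.
print(list(fields.items(multi=True)))
# [('title', 'My post'), ('tag', 'python'), ('tag', 'werkzeug')]

# Plain item access returns only the first value stored under a name.
print(fields["tag"])  # 'python'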
Example no. 38
def test_multidict_encode_decode_text(t1, t2):
    d = OrderedMultiDict()
    d.add(t1, t2)
    assert d == urls.url_decode(urls.url_encode(d))
Example no. 39
def test_ordered_multidict():
    """Test the OrderedMultiDict"""
    d = OrderedMultiDict()
    assert not d
    d.add('foo', 'bar')
    assert len(d) == 1
    d.add('foo', 'baz')
    assert len(d) == 1
    assert d.items() == [('foo', 'bar')]
    assert list(d) == ['foo']
    assert d.items(multi=True) == [('foo', 'bar'),
                                   ('foo', 'baz')]
    del d['foo']
    assert not d
    assert len(d) == 0
    assert list(d) == []

    d.update([('foo', 1), ('foo', 2), ('bar', 42)])
    d.add('foo', 3)
    assert d.getlist('foo') == [1, 2, 3]
    assert d.getlist('bar') == [42]
    assert d.items() == [('foo', 1), ('bar', 42)]
    assert d.keys() == list(d) == list(d.iterkeys()) == ['foo', 'bar']
    assert d.items(multi=True) == [('foo', 1), ('foo', 2),
                                   ('bar', 42), ('foo', 3)]
    assert len(d) == 2

    assert d.pop('foo') == 1
    assert d.pop('blafasel', None) is None
    assert d.pop('blafasel', 42) == 42
    assert len(d) == 1
    assert d.poplist('bar') == [42]
    assert not d

    assert d.get('missingkey') is None

    d.add('foo', 42)
    d.add('foo', 23)
    d.add('bar', 2)
    d.add('foo', 42)
    assert d == MultiDict(d)
    id = ImmutableOrderedMultiDict(d)
    assert d == id
    d.add('foo', 2)
    assert d != id

    d.update({'blah': [1, 2, 3]})
    assert d['blah'] == 1
    assert d.getlist('blah') == [1, 2, 3]

    # setlist works
    d = OrderedMultiDict()
    d['foo'] = 42
    d.setlist('foo', [1, 2])
    assert d.getlist('foo') == [1, 2]

    assert_raises(OrderedMultiDict.KeyError, d.pop, 'missing')
    assert_raises(OrderedMultiDict.KeyError, d.__getitem__, 'missing')

    # popping
    d = OrderedMultiDict()
    d.add('foo', 23)
    d.add('foo', 42)
    d.add('foo', 1)
    assert d.popitem() == ('foo', 23)
    assert_raises(OrderedMultiDict.KeyError, d.popitem)
    assert not d

    d.add('foo', 23)
    d.add('foo', 42)
    d.add('foo', 1)
    assert d.popitemlist() == ('foo', [23, 42, 1])
    assert_raises(OrderedMultiDict.KeyError, d.popitemlist)
Example no. 42
 def _reset(self):
     self._statements = OrderedMultiDict()
     self.last_statement = None
     self.last_duration = None
     self.last_timestamp = None
Example no. 43
    def test_upload_same_file_twice_should_fail(self, mock_logger):
        username = "******"
        filename = "map_1500.dat"
        message_key = "MAP_UPLOAD_EXISTING_MAP_LOCKED"

        self.copy_test_data_file("existing_unlocked.dat", self.uploads_dir,
                                 filename)

        original_file_content = self.load_test_data_file(filename)

        first_data = OrderedMultiDict()
        first_data.add("userName", username)
        first_data.add("map", (BytesIO(original_file_content), filename))

        first_response = self.perform_upload(first_data)

        second_data = OrderedMultiDict()
        second_data.add("userName", username)
        second_data.add("map", (BytesIO(original_file_content), filename))

        second_response = self.perform_upload(second_data)

        assert second_response.status_code == 200
        assert second_response.mimetype == "text/html"

        # Verify that the new map file was uploaded and locked,
        # but the "existing file locked" error message appears after second upload
        expected_nbt_file = self.load_test_data_nbt_file(filename)
        uploaded_nbt_file = self.load_uploaded_nbt_file(filename)

        self.verify_matching_nbt_values(expected_nbt_file, uploaded_nbt_file)

        assert get_nbt_map_value(uploaded_nbt_file, "locked") == 1

        self.verify_flash_message_by_key(message_key, second_response.data,
                                         filename)
        mock_logger.warn.assert_called_with(self.get_log_message(message_key),
                                            filename, username)
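
The helpers in this test are project-specific, but the data layout suggests an upload posted through a Werkzeug/Flask test client: when a value in the posted data is a (stream, filename) tuple, the client sends it as a multipart file upload. A rough sketch of that mechanism against a throwaway Flask app (the /upload route, field handling, and byte content below are invented for illustration):

from io import BytesIO

from flask import Flask, request
from werkzeug.datastructures import OrderedMultiDict

app = Flask(__name__)

@app.route("/upload", methods=["POST"])
def upload():
    uploaded = request.files["map"]
    return "got {} from {}".format(uploaded.filename, request.form["userName"])

def build_upload_data(username, filename, content):
    # Rebuild the dict per request: each upload needs a fresh BytesIO stream.
    data = OrderedMultiDict()
    data.add("userName", username)
    data.add("map", (BytesIO(content), filename))
    return data

with app.test_client() as client:
    first = client.post("/upload", data=build_upload_data("alice", "map_1500.dat", b"\x00\x01"))
    second = client.post("/upload", data=build_upload_data("alice", "map_1500.dat", b"\x00\x01"))
    assert first.status_code == second.status_code == 200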
Example no. 44
def test_url_encoding_dict_str_list(d):
    assert OrderedMultiDict(d) == urls.url_decode(urls.url_encode(d))
Example no. 45
def test_multidict_encoding():
    d = OrderedMultiDict()
    d.add('2013-10-10T23:26:05.657975+0000', '2013-10-10T23:26:05.657975+0000')
    assert urls.url_encode(
        d
    ) == '2013-10-10T23%3A26%3A05.657975%2B0000=2013-10-10T23%3A26%3A05.657975%2B0000'
Example no. 46
def test_url_encoding_dict_str_int(d):
    assert OrderedMultiDict({k: str(v)
                             for k, v in d.items()
                             }) == urls.url_decode(urls.url_encode(d))
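
Examples no. 38, 44, and 46 all assert the same property: url_encode followed by url_decode gives back an equal multidict, including repeated keys. A quick standalone check of that round trip, assuming a Werkzeug version that still ships urls.url_encode/url_decode as the tests above do (sample keys and values are arbitrary):

from werkzeug import urls
from werkzeug.datastructures import OrderedMultiDict

d = OrderedMultiDict()
d.add("q", "a b")     # space and '&' must survive the encoding
d.add("q", "c&d")
d.add("lang", "it")

encoded = urls.url_encode(d)
print(encoded)        # e.g. 'q=a+b&q=c%26d&lang=it' (exact quoting depends on the Werkzeug version)
assert d == urls.url_decode(encoded)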
Example no. 47
    def events(self, page=1, **searches):
        Event = request.env['event.event']
        EventType = request.env['event.type']

        searches.setdefault('search', '')
        searches.setdefault('date', 'all')
        searches.setdefault('tags', '')
        searches.setdefault('type', 'all')
        searches.setdefault('country', 'all')

        website = request.website
        today = datetime.today()

        def sdn(date):
            return fields.Datetime.to_string(date.replace(hour=23, minute=59, second=59))

        def sd(date):
            return fields.Datetime.to_string(date)

        def get_month_filter_domain(filter_name, months_delta):
            first_day_of_the_month = today.replace(day=1)
            filter_string = _('This month') if months_delta == 0 \
                else format_date(request.env, value=today + relativedelta(months=months_delta),
                                 date_format='LLLL', lang_code=get_lang(request.env).code).capitalize()
            return [filter_name, filter_string, [
                ("date_end", ">=", sd(first_day_of_the_month + relativedelta(months=months_delta))),
                ("date_begin", "<", sd(first_day_of_the_month + relativedelta(months=months_delta+1)))],
                0]

        dates = [
            ['all', _('Upcoming Events'), [("date_end", ">", sd(today))], 0],
            ['today', _('Today'), [
                ("date_end", ">", sd(today)),
                ("date_begin", "<", sdn(today))],
                0],
            get_month_filter_domain('month', 0),
            get_month_filter_domain('nextmonth1', 1),
            get_month_filter_domain('nextmonth2', 2),
            ['old', _('Past Events'), [
                ("date_end", "<", sd(today))],
                0],
        ]

        # search domains
        domain_search = {'website_specific': website.website_domain()}

        if searches['search']:
            domain_search['search'] = [('name', 'ilike', searches['search'])]

        search_tags = self._extract_searched_event_tags(searches)
        if search_tags:
            # Example: You filter on age: 10-12 and activity: football.
            # Doing it this way ensures we only get events tagged with both "age: 10-12" AND "activity: football".
            # Adding another tag "age: 12-15" to the search would fetch the ones tagged:
            # ("age: 10-12" OR "age: 12-15") AND "activity: football"
            grouped_tags = defaultdict(list)
            for tag in search_tags:
                grouped_tags[tag.category_id].append(tag)
            domain_search['tags'] = []
            for group in grouped_tags:
                domain_search['tags'] = expression.AND([domain_search['tags'], [('tag_ids', 'in', [tag.id for tag in grouped_tags[group]])]])

        current_date = None
        current_type = None
        current_country = None
        for date in dates:
            if searches["date"] == date[0]:
                domain_search["date"] = date[2]
                if date[0] != 'all':
                    current_date = date[1]

        if searches["type"] != 'all':
            current_type = EventType.browse(int(searches['type']))
            domain_search["type"] = [("event_type_id", "=", int(searches["type"]))]

        if searches["country"] != 'all':
            current_country = request.env['res.country'].browse(int(searches['country']))
            domain_search["country"] = ['|', ("country_id", "=", int(searches["country"])), ("country_id", "=", False)]

        def dom_without(without):
            domain = []
            for key, search in domain_search.items():
                if key != without:
                    domain += search
            return domain

        # count by domains without self search
        for date in dates:
            if date[0] != 'old':
                date[3] = Event.search_count(dom_without('date') + date[2])

        domain = dom_without('type')

        domain = dom_without('country')
        countries = Event.read_group(domain, ["id", "country_id"], groupby="country_id", orderby="country_id")
        countries.insert(0, {
            'country_id_count': sum([int(country['country_id_count']) for country in countries]),
            'country_id': ("all", _("All Countries"))
        })

        step = 12  # Number of events per page
        event_count = Event.search_count(dom_without("none"))
        pager = website.pager(
            url="/event",
            url_args=searches,
            total=event_count,
            page=page,
            step=step,
            scope=5)

        order = 'date_begin'
        if searches.get('date', 'all') == 'old':
            order = 'date_begin desc'
        order = 'is_published desc, ' + order
        events = Event.search(dom_without("none"), limit=step, offset=pager['offset'], order=order)

        keep = QueryURL('/event', **{key: value for key, value in searches.items() if (key == 'search' or value != 'all')})

        values = {
            'current_date': current_date,
            'current_country': current_country,
            'current_type': current_type,
            'event_ids': events,  # event_ids used in website_event_track so we keep name as it is
            'dates': dates,
            'categories': request.env['event.tag.category'].search([]),
            'countries': countries,
            'pager': pager,
            'searches': searches,
            'search_tags': search_tags,
            'keep': keep,
        }

        if searches['date'] == 'old':
            # the only way to display this content is to set date=old so it must be canonical
            values['canonical_params'] = OrderedMultiDict([('date', 'old')])

        return request.render("website_event.index", values)
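
The tag filter in the snippet above works in two steps: tags are grouped by category, each category contributes a single ('tag_ids', 'in', ids) leaf (the 'in' operator ORs the tags inside that category), and expression.AND then requires every category to match. A rough standalone sketch of the grouping step, using made-up (category, tag_id) pairs instead of Odoo records:

from collections import defaultdict

# Stand-ins for the searched tag records: (category, tag_id)
search_tags = [("age", 10), ("age", 11), ("activity", 7)]

grouped_tags = defaultdict(list)
for category, tag_id in search_tags:
    grouped_tags[category].append(tag_id)

# One leaf per category; 'in' already ORs the ids within a category.
leaves = [("tag_ids", "in", ids) for ids in grouped_tags.values()]
print(leaves)  # [('tag_ids', 'in', [10, 11]), ('tag_ids', 'in', [7])]
# expression.AND would then chain these leaves so every category must match.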