def test_upload_single_file_should_be_successful(self, mock_logger):
    """A valid single-map upload succeeds, locks the map, and logs success."""
    user = "******"
    map_name = "map_1500.dat"
    success_key = "MAP_UPLOAD_SUCCESS"

    # Seed the uploads directory with an unlocked map under the target name.
    self.copy_test_data_file("existing_unlocked.dat", self.uploads_dir, map_name)
    map_bytes = self.load_test_data_file(map_name)

    form = OrderedMultiDict()
    form.add("userName", user)
    form.add("map", (BytesIO(map_bytes), map_name))
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"

    # The uploaded map must match the fixture and be locked afterwards.
    expected_nbt = self.load_test_data_nbt_file(map_name)
    uploaded_nbt = self.load_uploaded_nbt_file(map_name)
    self.verify_matching_nbt_values(expected_nbt, uploaded_nbt)
    assert get_nbt_map_value(uploaded_nbt, "locked") == 1

    self.verify_flash_message_by_key(success_key, response.data, map_name)
    mock_logger.info.assert_called_with(
        self.get_log_message(success_key), map_name, user)
def test_upload_where_existing_file_is_already_locked_should_fail(
        self, mock_logger):
    """Uploading over a locked map is rejected and the file stays untouched."""
    user = "******"
    target_name = "map_1500.dat"
    locked_fixture = "existing_locked.dat"
    error_key = "MAP_UPLOAD_EXISTING_MAP_LOCKED"

    # Place a locked map in the uploads directory under the target name.
    self.copy_test_data_file(locked_fixture, self.uploads_dir, target_name)
    locked_bytes = self.load_test_data_file(locked_fixture)
    upload_bytes = self.load_test_data_file(target_name)

    form = OrderedMultiDict()
    form.add("userName", user)
    form.add("map", (BytesIO(upload_bytes), target_name))
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"

    # Verify that the existing map file was NOT overwritten.
    self.verify_file_content(self.uploads_dir, target_name, locked_bytes)
    self.verify_flash_message_by_key(error_key, response.data, target_name)
    mock_logger.warn.assert_called_with(
        self.get_log_message(error_key), target_name, user)
def test_multidict_encoding():
    """url_encode must percent-escape ':' and '+' in both keys and values."""
    timestamp = "2013-10-10T23:26:05.657975+0000"
    d = OrderedMultiDict()
    d.add(timestamp, timestamp)
    expected = ("2013-10-10T23%3A26%3A05.657975%2B0000"
                "=2013-10-10T23%3A26%3A05.657975%2B0000")
    assert urls.url_encode(d) == expected
def test_upload_with_invalid_filename_and_file_already_exists_should_fail(
        self, mock_logger, filename):
    """An invalid target filename is rejected even when a valid map is sent."""
    user = "******"
    error_key = "MAP_UPLOAD_FILENAME_INVALID"

    # An existing file already occupies the (invalid) target name.
    self.copy_test_data_file(filename, self.uploads_dir)
    existing_bytes = self.load_test_data_file(filename)

    # Upload a valid map file, but rename it to the existing filename.
    valid_map_bytes = self.load_test_data_file("map_1500.dat")
    form = OrderedMultiDict()
    form.add("userName", user)
    form.add("map", (BytesIO(valid_map_bytes), filename))
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"

    # Verify that the existing map file was NOT overwritten.
    self.verify_file_content(self.uploads_dir, filename, existing_bytes)
    self.verify_flash_message_by_key(error_key, response.data, filename)
    mock_logger.warn.assert_called_with(
        self.get_log_message(error_key), filename, user)
def test_upload_file_that_already_exists_should_fail(self, mock_logger):
    """Uploading a schematic whose target name already exists is rejected."""
    user = "******"
    source_name = "mrt_v5_final_elevated_centre_station.schematic"
    target_name = self.uploaded_filename(user, source_name)
    impostor_name = "mrt_v5_final_underground_single_track.schematic"
    error_key = "SCHEMATIC_UPLOAD_FILE_EXISTS"

    # Copy an impostor file with different content to the uploads directory
    # with the same name as the file to upload.
    self.copy_test_data_file(impostor_name, self.uploads_dir, target_name)
    source_bytes = self.load_test_data_file(source_name)

    form = OrderedMultiDict()
    form.add("userName", user)
    form.add("schematic", (BytesIO(source_bytes), source_name))
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"

    # The uploads directory must hold only the impostor file, unmodified.
    assert len(os.listdir(self.uploads_dir)) == 1
    impostor_bytes = self.load_test_data_file(impostor_name)
    self.verify_file_content(self.uploads_dir, target_name, impostor_bytes)
    self.verify_flash_message_by_key(error_key, response.data, target_name)
    mock_logger.warn.assert_called_with(
        self.get_log_message(error_key), target_name, user)
def test_upload_with_invalid_username_should_fail(self, mock_logger, username,
                                                  message_key):
    """Invalid usernames are rejected and the existing map is not replaced."""
    target_name = "map_1500.dat"
    fixture_name = "existing_unlocked.dat"

    self.copy_test_data_file(fixture_name, self.uploads_dir, target_name)
    existing_bytes = self.load_test_data_file(fixture_name)
    upload_bytes = self.load_test_data_file(target_name)

    form = OrderedMultiDict()
    form.add("userName", username)
    form.add("map", (BytesIO(upload_bytes), target_name))
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"

    # Verify that the existing map file was NOT overwritten.
    self.verify_file_content(self.uploads_dir, target_name, existing_bytes)
    self.verify_flash_message_by_key(message_key, response.data)

    # An empty/None username is logged without the username argument.
    if username:
        mock_logger.warn.assert_called_with(
            self.get_log_message(message_key), username)
    else:
        mock_logger.warn.assert_called_with(self.get_log_message(message_key))
def get_with_data(*args, **kwargs):
    """Fetch reservations (with optional related datasets) for serialization.

    Positional ``args`` select extra data to attach to each entry
    ('vc_equipment', 'occurrences'); keyword args control filtering,
    ordering and paging.  'occurs_on' is a required keyword.  Returns the
    values of an id-keyed OrderedDict: one ``{'reservation': ...}`` dict
    per row, augmented in place.
    NOTE(review): uses ``iterkeys``/``iteritems`` — this is Python 2 code.
    """
    filters = kwargs.pop('filters', None)
    limit = kwargs.pop('limit', None)
    offset = kwargs.pop('offset', 0)
    order = kwargs.pop('order', Reservation.start_dt)
    limit_per_room = kwargs.pop('limit_per_room', False)
    occurs_on = kwargs.pop('occurs_on')  # required keyword argument
    if kwargs:
        raise ValueError('Unexpected kwargs: {}'.format(kwargs))

    # Eager-load the room to avoid an extra query per reservation.
    query = Reservation.query.options(joinedload(Reservation.room))
    if filters:
        query = query.filter(*filters)
    if occurs_on:
        # Keep only reservations with a valid occurrence on the given dates.
        query = query.filter(
            Reservation.id.in_(db.session.query(ReservationOccurrence.reservation_id)
                               .filter(ReservationOccurrence.date.in_(occurs_on),
                                       ReservationOccurrence.is_valid))
        )
    if limit_per_room and (limit or offset):
        # Paging applies within each room rather than globally.
        query = limit_groups(query, Reservation, Reservation.room_id, order, limit, offset)

    query = query.order_by(order, Reservation.created_dt)

    if not limit_per_room:
        if limit:
            query = query.limit(limit)
        if offset:
            query = query.offset(offset)

    # Preserve query order; keyed by reservation id for the joins below.
    result = OrderedDict((r.id, {'reservation': r}) for r in query)

    if 'vc_equipment' in args:
        # Scalar subquery resolving the room's 'Video conference' equipment
        # type id, correlated with the outer Reservation row.
        vc_id_subquery = db.session.query(EquipmentType.id) \
            .correlate(Reservation) \
            .filter_by(name='Video conference') \
            .join(RoomEquipmentAssociation) \
            .filter(RoomEquipmentAssociation.c.room_id == Reservation.room_id) \
            .as_scalar()

        # noinspection PyTypeChecker
        vc_equipment_data = dict(db.session.query(Reservation.id,
                                                  static_array.array_agg(EquipmentType.name))
                                 .join(ReservationEquipmentAssociation, EquipmentType)
                                 .filter(Reservation.id.in_(result.iterkeys()))
                                 .filter(EquipmentType.parent_id == vc_id_subquery)
                                 .group_by(Reservation.id))
        for id_, data in result.iteritems():
            # Default to an empty tuple when a reservation has no VC equipment.
            data['vc_equipment'] = vc_equipment_data.get(id_, ())

    if 'occurrences' in args:
        occurrence_data = OrderedMultiDict(db.session.query(ReservationOccurrence.reservation_id,
                                                            ReservationOccurrence)
                                           .filter(ReservationOccurrence.reservation_id.in_(result.iterkeys()))
                                           .order_by(ReservationOccurrence.start_dt))
        for id_, data in result.iteritems():
            # getlist returns [] when a reservation has no occurrences.
            data['occurrences'] = occurrence_data.getlist(id_)

    return result.values()
def create_request_data(self, filename):
    """Split *filename* into root/extension form fields for a request."""
    root, extension = os.path.splitext(filename)
    form = OrderedMultiDict()
    form.add("fileRoot", root)
    form.add("fileExtension", extension[1:])  # drop the leading dot
    return form
def test_ordered_multidict_encoding():
    """Repeated keys must be URL-encoded in insertion order."""
    d = OrderedMultiDict()
    for key, value in [("foo", 1), ("foo", 2), ("foo", 3), ("bar", 0), ("foo", 4)]:
        d.add(key, value)
    assert urls.url_encode(d) == "foo=1&foo=2&foo=3&bar=0&foo=4"
def test_ordered_multidict_encoding(self):
    """Repeated keys keep their insertion order when URL-encoded."""
    entries = (('foo', 1), ('foo', 2), ('foo', 3), ('bar', 0), ('foo', 4))
    d = OrderedMultiDict()
    for name, value in entries:
        d.add(name, value)
    self.assert_equal(urls.url_encode(d), 'foo=1&foo=2&foo=3&bar=0&foo=4')
def test_multidict_encoding(self):
    """':' and '+' must be percent-escaped in encoded keys and values."""
    raw = '2013-10-10T23:26:05.657975+0000'
    d = OrderedMultiDict()
    d.add(raw, raw)
    self.assert_equal(
        urls.url_encode(d),
        '2013-10-10T23%3A26%3A05.657975%2B0000=2013-10-10T23%3A26%3A05.657975%2B0000'
    )
def test_ordered_multidict_encoding():
    """URL encoding preserves the insertion order of duplicate keys."""
    pairs = [('foo', 1), ('foo', 2), ('foo', 3), ('bar', 0), ('foo', 4)]
    d = OrderedMultiDict()
    for k, v in pairs:
        d.add(k, v)
    assert urls.url_encode(d) == 'foo=1&foo=2&foo=3&bar=0&foo=4'
def __init__(self, args, authz, limit=None, max_limit=MAX_PAGE):
    """Initialize query state (args, authz, pagination) from request args.

    :param args: request arguments; coerced to an OrderedMultiDict if it is
        not already a MultiDict
    :param authz: authorization context for the request
    :param limit: explicit page size; when None it is read from the 'limit'
        argument (default 20) and clamped to [0, max_limit]
    :param max_limit: upper bound for the derived page size
    """
    # BUG FIX: this constructor was misnamed ``__int__`` (the integer
    # conversion hook, ``int(obj)``), so it never ran on instantiation.
    # Renamed to ``__init__``; the body matches the sibling class at the
    # same signature.
    if not isinstance(args, MultiDict):
        args = OrderedMultiDict(args)
    self.args = args
    self.authz = authz
    # Negative offsets are clamped to 0.
    self.offset = max(0, self.getint("offset", 0))
    if limit is None:
        limit = min(max_limit, max(0, self.getint("limit", 20)))
    self.limit = limit
def test_ordered_multidict_encoding():
    """Make sure URLs are properly encoded from OrderedMultiDicts."""
    d = OrderedMultiDict()
    for name, value in (('foo', 1), ('foo', 2), ('foo', 3), ('bar', 0), ('foo', 4)):
        d.add(name, value)
    assert url_encode(d) == 'foo=1&foo=2&foo=3&bar=0&foo=4'
def __init__(self, args, authz, limit=None):
    """Capture request args, authz context, and the pagination window."""
    if not isinstance(args, MultiDict):
        # Normalize plain mappings/pairs into a MultiDict.
        args = OrderedMultiDict(args)
    self.args = args
    self.authz = authz
    # Clamp the offset so it is never negative.
    self.offset = max(0, self.getint('offset', 0))
    if limit is None:
        # Default page size 20, clamped to [0, MAX_RESULT_WINDOW].
        requested = max(0, self.getint('limit', 20))
        limit = min(MAX_RESULT_WINDOW, requested)
    self.limit = limit
    self.prefix = stringify(self.get('prefix'))
def __init__(self, parsed):
    """Build a form abstraction from a parsed document or <form> element."""
    soup = helpers.ensure_soup(parsed)
    # Accept either a <form> element directly or a document containing one.
    if soup.name != 'form':
        soup = soup.find('form')
    self.parsed = soup
    self.action = self.parsed.get('action')
    # HTML defaults to GET when no method attribute is present.
    self.method = self.parsed.get('method', 'get')
    self.fields = OrderedMultiDict()
    for field in _parse_fields(self.parsed):
        self.add_field(field)
def __init__(self):
    """
    Creates a new request. The request is probably useless right now, but the
    HTTP parser will then go on to set the right attributes on it.
    """
    # Request-line components, filled in by the parser.
    self.method = ""
    # Full request target (e.g. `/a/b/?c=d`); later urlsplit into a path
    # and a query string by _parse_path.
    self.full_path = b""
    self.path = ""
    self.query = ""
    self.version = ""

    # Body is unknown until the parser passes it in.
    self.body = ""

    self.cookies = cookies.SimpleCookie()

    # A Headers object serves our purposes best here.
    self.headers = Headers()

    # Args, form values, combined values, and files are OrderedMultiDicts.
    self.args = OrderedMultiDict()
    self._form = OrderedMultiDict()
    self.values = OrderedMultiDict()
    self.files = OrderedMultiDict()

    # Transport-level peer information.
    self.ip = ""
    self.port = 0

    # Extra values, for hooks.
    self.extra = {}

    self.should_keep_alive = False
def test_with_strict_args_get_valid(self, mock_request_factory):
    """A GET whose query args exactly match the schema passes validation."""
    fake_request = self.TestReq()
    fake_request.method = 'GET'
    fake_request.args = OrderedMultiDict({'f': 'val'})
    mock_request_factory.return_value = fake_request

    @strict.with_strict_args(self.TestSchema(), locations=('query', ))
    def viewfunc():
        pass

    # Must not raise.
    viewfunc()
def test_parse_arg_keys(self):
    """_parse_arg_keys returns the argument keys as a tuple, in order."""
    args = OrderedMultiDict()
    for key in ('f', 'b', 'x'):
        args.add(key, 'val')
    result = strict.KeywordParser._parse_arg_keys(args)
    self.assertEqual(result, ('f', 'b', 'x'))
def test_nested_checkboxes_question_followup_get_data(self):
    """Followup answers are kept only when a triggering lead value is chosen."""
    question = self.question(questions=[
        {
            "id": "lead",
            "type": "checkboxes",
            "options": [
                {"label": "label1", "value": "yes"},
                {"label": "label2", "value": "no"},
                {"label": "label3", "value": "maybe not"},
                {"label": "label4", "value": "maybe"},
            ],
            # 'follow' only applies when lead includes "yes" or "maybe".
            "followup": {"follow": ["yes", "maybe"]}
        },
        {
            "id": "follow",
            "type": "text",
        }
    ])

    cases = [
        # No triggering value selected: followup forced to None.
        ([('lead', 'no'), ('lead', 'maybe not')],
         {'lead': ['no', 'maybe not'], 'follow': None}),
        # Triggering value but no followup answer: key omitted entirely.
        ([('lead', 'yes'), ('lead', 'maybe not')],
         {'lead': ['yes', 'maybe not']}),
        # Triggering value with an answer: followup preserved.
        ([('lead', 'yes'), ('lead', 'maybe not'), ('follow', 'a')],
         {'lead': ['yes', 'maybe not'], 'follow': 'a'}),
        # Answer given but no triggering value: followup reset to None.
        ([('lead', 'no'), ('lead', 'maybe not'), ('follow', 'a')],
         {'lead': ['no', 'maybe not'], 'follow': None}),
    ]
    for submission, expected in cases:
        assert question.get_data(OrderedMultiDict(submission)) == expected
def test_upload_same_file_twice_should_fail(self, mock_logger):
    """The first upload locks the map, so the second identical upload fails."""
    user = "******"
    map_name = "map_1500.dat"
    error_key = "MAP_UPLOAD_EXISTING_MAP_LOCKED"

    self.copy_test_data_file("existing_unlocked.dat", self.uploads_dir, map_name)
    map_bytes = self.load_test_data_file(map_name)

    def make_form():
        form = OrderedMultiDict()
        form.add("userName", user)
        form.add("map", (BytesIO(map_bytes), map_name))
        return form

    self.perform_upload(make_form())              # first upload locks the map
    second_response = self.perform_upload(make_form())

    assert second_response.status_code == 200
    assert second_response.mimetype == "text/html"

    # The first upload's file is present and locked...
    expected_nbt = self.load_test_data_nbt_file(map_name)
    uploaded_nbt = self.load_uploaded_nbt_file(map_name)
    self.verify_matching_nbt_values(expected_nbt, uploaded_nbt)
    assert get_nbt_map_value(uploaded_nbt, "locked") == 1

    # ...and the second attempt reported the "existing map locked" error.
    self.verify_flash_message_by_key(error_key, second_response.data, map_name)
    mock_logger.warn.assert_called_with(
        self.get_log_message(error_key), map_name, user)
def __init__(self, args, authz, limit=None):
    """Parse paging window, query text, prefix, and cache flag from args."""
    if not isinstance(args, MultiDict):
        # Coerce plain mappings/pairs into a MultiDict.
        args = OrderedMultiDict(args)
    self.args = args
    self.authz = authz
    # Offsets below zero are clamped to zero.
    self.offset = max(0, self.getint('offset', 0))
    if limit is None:
        # Page size defaults to 20 and is bounded by MAX_PAGE.
        limit = min(MAX_PAGE, max(0, self.getint('limit', 20)))
    self.limit = limit
    self.text = stringify(self.get('q'))
    self.prefix = stringify(self.get('prefix'))
    # Disable or enable query caching
    self.cache = self.getbool('cache', settings.CACHE)
def __init__(self, args, authz, limit=None, max_limit=MAX_PAGE):
    """Extract paging window, search text, prefix, and cache flag from args."""
    self.args = args if isinstance(args, MultiDict) else OrderedMultiDict(args)
    self.authz = authz
    # Offsets may not go below zero.
    self.offset = max(0, self.getint("offset", 0))
    if limit is None:
        # Requested page size (default 20), bounded by max_limit.
        requested = max(0, self.getint("limit", 20))
        limit = min(max_limit, requested)
    self.limit = limit
    self.text = sanitize_text(self.get("q"))
    self.prefix = sanitize_text(self.get("prefix"))
    # Disable or enable query caching
    self.cache = self.getbool("cache", settings.CACHE)
def test_with_strict_args_get_invalid(self, mock_request_factory):
    """An unexpected query argument makes strict validation raise."""
    fake_request = self.TestReq()
    fake_request.method = 'GET'
    args = OrderedMultiDict()
    args.add('f', 'val')
    args.add('b', 'val')  # 'b' is not in the schema
    fake_request.args = args
    mock_request_factory.return_value = fake_request

    @strict.with_strict_args(self.TestSchema(), locations=('query', ))
    def viewfunc():
        pass

    with self.assertRaises(strict.ValidationError):
        viewfunc()
def test_upload_with_no_files_should_fail(self, mock_logger):
    """Submitting the form without any file parts yields an error flash."""
    user = "******"
    error_key = "MAP_UPLOAD_NO_FILES"

    form = OrderedMultiDict()
    form.add("userName", user)
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"
    self.verify_flash_message_by_key(error_key, response.data)
    mock_logger.warn.assert_called_with(self.get_log_message(error_key), user)
def export_room(self, user):
    """Yield serializable room payloads, optionally with their reservations."""
    loc = Location.query.filter_by(name=self._location, is_deleted=False).first()
    if loc is None:
        return

    # Retrieve rooms
    room_filters = [Room.id.in_(self._ids), Room.location_id == loc.id]
    rooms_data = list(Room.get_with_data(filters=room_filters))

    # Retrieve reservations
    reservations = None
    if self._detail == 'reservations':
        room_ids = (x['room'].id for x in rooms_data)
        reservations = OrderedMultiDict(_export_reservations(
            self, True, False, [Reservation.room_id.in_(room_ids)]))

    for result in rooms_data:
        yield _serializable_room(result, reservations)
def get_with_data(*args, **kwargs):
    """Return reservation dicts, optionally augmented with occurrence lists.

    Positional args select extra datasets ('occurrences'); keyword args
    control filtering, ordering and paging.  'occurs_on' is a required
    keyword; any other unknown kwarg raises ValueError.
    """
    filters = kwargs.pop('filters', None)
    limit = kwargs.pop('limit', None)
    offset = kwargs.pop('offset', 0)
    order = kwargs.pop('order', Reservation.start_dt)
    limit_per_room = kwargs.pop('limit_per_room', False)
    occurs_on = kwargs.pop('occurs_on')
    if kwargs:
        raise ValueError(f'Unexpected kwargs: {kwargs}')

    # Eager-load rooms to avoid one extra query per reservation later.
    query = Reservation.query.options(joinedload(Reservation.room))
    if filters:
        query = query.filter(*filters)
    if occurs_on:
        # Restrict to reservations with a valid occurrence on the given dates.
        occurrence_ids = db.session.query(
            ReservationOccurrence.reservation_id).filter(
                ReservationOccurrence.date.in_(occurs_on),
                ReservationOccurrence.is_valid)
        query = query.filter(Reservation.id.in_(occurrence_ids))
    if limit_per_room and (limit or offset):
        # Paging applies within each room rather than globally.
        query = limit_groups(query, Reservation, Reservation.room_id, order,
                             limit, offset)
    query = query.order_by(order, Reservation.created_dt)
    if not limit_per_room:
        if limit:
            query = query.limit(limit)
        if offset:
            query = query.offset(offset)

    # Keyed by reservation id; insertion order follows the query order.
    result = {r.id: {'reservation': r} for r in query}

    if 'occurrences' in args:
        # Map reservation id -> its occurrences ordered by start time.
        occurrence_rows = db.session.query(
            ReservationOccurrence.reservation_id, ReservationOccurrence).filter(
                ReservationOccurrence.reservation_id.in_(result.keys())
            ).order_by(ReservationOccurrence.start_dt)
        occurrence_data = OrderedMultiDict(occurrence_rows)
        for id_, data in result.items():
            # getlist returns [] when a reservation has no occurrences.
            data['occurrences'] = occurrence_data.getlist(id_)

    return list(result.values())
def export_room(self, aw):
    """Yield serializable rooms (with equipment data) for the location."""
    loc = Location.find_first(name=self._location)
    if loc is None:
        return

    # Retrieve rooms
    rooms_data = list(Room.get_with_data(
        'vc_equipment', 'non_vc_equipment',
        filters=[Room.id.in_(self._ids), Room.location_id == loc.id]))

    # Retrieve reservations
    reservations = None
    if self._detail == 'reservations':
        room_filter = Reservation.room_id.in_(x['room'].id for x in rooms_data)
        reservations = OrderedMultiDict(
            _export_reservations(self, True, False, [room_filter]))

    for result in rooms_data:
        yield _serializable_room(result, reservations)
def test_upload_multiple_files_should_be_successful(self, mock_logger):
    """Seven maps uploaded in one request are all stored, locked, and logged."""
    user = "******"
    success_key = "MAP_UPLOAD_SUCCESS"

    # Upload 7 files
    map_names = [
        "map_1500.dat", "map_2000.dat", "map_1501.dat", "map_1502.dat",
        "map_1001.dat", "map_1503.dat", "map_1504.dat"
    ]
    for map_name in map_names:
        self.copy_test_data_file("existing_unlocked.dat", self.uploads_dir,
                                 map_name)
    originals = self.load_test_data_files(map_names)

    form = OrderedMultiDict()
    form.add("userName", user)
    for map_name in originals:
        form.add("map", (BytesIO(originals[map_name]), map_name))
    response = self.perform_upload(form)

    assert response.status_code == 200
    assert response.mimetype == "text/html"

    expected_log_calls = []
    for map_name in originals:
        # Verify that each new map file was uploaded and locked.
        expected_nbt = self.load_test_data_nbt_file(map_name)
        uploaded_nbt = self.load_uploaded_nbt_file(map_name)
        self.verify_matching_nbt_values(expected_nbt, uploaded_nbt)
        assert get_nbt_map_value(uploaded_nbt, "locked") == 1
        self.verify_flash_message_by_key(success_key, response.data, map_name)
        expected_log_calls.append(
            call(self.get_log_message(success_key), map_name, user))
    mock_logger.info.assert_has_calls(expected_log_calls, any_order=True)
def _is_canonical_url(self, canonical_params):
    """Returns whether the current request URL is canonical."""
    self.ensure_one()
    # Compare OrderedMultiDict because the order is important, there must be
    # only one canonical and not params permutations.
    params = request.httprequest.args
    canonical_params = canonical_params or OrderedMultiDict()
    if params != canonical_params:
        return False
    # Compare URL at the first rerouting iteration (if available) because
    # it's the one with the language in the path.
    # It is important to also test the domain of the current URL.
    current_url = request.httprequest.url_root[:-1] + (hasattr(request, 'rerouting') and request.rerouting[0] or request.httprequest.path)
    # NOTE(review): canonical_params=None appears intentional — the params
    # were already compared above, so only path and domain remain to check;
    # confirm against _get_canonical_url_localized's handling of None.
    canonical_url = self._get_canonical_url_localized(lang=request.lang, canonical_params=None)
    # A request path with quotable characters (such as ",") is never
    # canonical because request.httprequest.base_url is always unquoted,
    # and canonical url is always quoted, so it is never possible to tell
    # if the current URL is indeed canonical or not.
    return current_url == canonical_url