def hook(req, resp, resource, params):
    """Reject the request with HTTP 413 when its declared size exceeds ``limit``.

    Requests without a Content-Length header pass through untouched.
    """
    size = req.content_length
    # Nothing to do when the header is absent or the size is acceptable.
    if size is None or size <= limit:
        return
    msg = ('The size of the file is too large. The file must not '
           'exceed ' + str(limit) + ' bytes in length.')
    raise falcon.HTTPRequestEntityTooLarge('File size is too large', msg)
def on_get(self, req, resp, **kwargs):
    """Search endpoint: ``?q=...`` with optional limit/autocomplete/lon/lat.

    Renders the results as GeoJSON; raises 400 when ``q`` is missing and
    413 when the search backend reports the query as too large.
    """
    query = req.get_param('q')
    if not query:
        raise falcon.HTTPBadRequest('Missing query', 'Missing query')
    limit = req.get_param_as_int('limit') or 5  # use config
    autocomplete = req.get_param_as_bool('autocomplete')
    if autocomplete is None:
        # Default is True.
        # https://github.com/falconry/falcon/pull/493#discussion_r44376219
        autocomplete = True
    lon, lat = self.parse_lon_lat(req)
    center = None
    # Compare against None explicitly: 0.0 is a valid longitude/latitude
    # (equator / prime meridian) but is falsy, so a truthiness test would
    # silently drop the center for such coordinates.
    if lon is not None and lat is not None:
        center = (lon, lat)
    filters = self.match_filters(req)
    try:
        results = search(query, limit=limit, autocomplete=autocomplete,
                         lat=lat, lon=lon, **filters)
    except EntityTooLarge as e:
        raise falcon.HTTPRequestEntityTooLarge(str(e))
    if not results:
        log_notfound(query)
    log_query(query, results)
    self.to_geojson(req, resp, results, query=query, filters=filters,
                    center=center, limit=limit)
def test_http_request_entity_too_large_no_title_and_desc_and_challenges(self):
    """With no arguments, the 413 error uses the status line as its title,
    has no description, and sets no Retry-After header."""
    with self.assertRaises(falcon.HTTPRequestEntityTooLarge) as ctx:
        raise falcon.HTTPRequestEntityTooLarge()
    err = ctx.exception
    self.assertEqual(status.HTTP_413, err.title,
                     'The title should be ' + status.HTTP_413 +
                     ', but it is: ' + err.title)
    self.assertEqual(None, err.description,
                     'The description should be None')
    self.assertNotIn('Retry-After', err.headers,
                     'Retry-After should not be in the headers')
def process_row(self, req, row, filters, columns):
    """Geocode one CSV row, optionally in two steps, enriching it in place.

    Cells may hold several values separated by ``|``; every combination of
    the configured columns is tried.  When ``self.columns0`` yields queries,
    a two-step search is used; otherwise a plain multiple search.  On
    success the row gains ``result_*`` columns, otherwise the query is
    logged as not found.
    """
    # Split each multi-valued cell on '|' (empty cells stay falsy).
    row_split = {k: v and v.split('|') for k, v in row.items()}

    def _queries(keys):
        # All combinations of the selected columns' values joined with
        # spaces; None placeholders become '' so join() never sees None,
        # and blank results are dropped.
        combos = itertools.product(*[row_split[k] or [None] for k in keys])
        return [s for s in (' '.join(part or '' for part in combo)
                            for combo in combos) if s]

    q0 = _queries(self.columns0)
    q = _queries(columns)
    filters = self.match_row_filters(row, filters)
    lat_column = req.get_param('lat')
    lon_column = req.get_param('lon')
    if lon_column and lat_column:
        lat = row.get(lat_column)
        lon = row.get(lon_column)
        if lat and lon:
            filters['lat'] = float(lat)
            filters['lon'] = float(lon)
    try:
        if not q0:
            results = multiple_search(q, autocomplete=False, limit=1,
                                      **filters)
            query = '|'.join(q)
        else:
            results = search2steps(self.config, q0[0], q,
                                   autocomplete=False, limit=1, **filters)
            query = '|'.join(q0) + ' ' + '|'.join(q)
    except EntityTooLarge as e:
        raise falcon.HTTPRequestEntityTooLarge(str(e))
    log_query(query, results)
    if results:
        result = results[0]
        row.update({
            'latitude': result.lat,
            'longitude': result.lon,
            'result_label': str(result),
            'result_score': round(result.score, 2),
            'result_type': result.type,
            'result_id': result.id,
            'result_housenumber': result.housenumber,
            'result_citycode': result.citycode,
        })
        self.add_extra_fields(row, result)
    else:
        log_notfound(query)
def test_http_request_entity_too_large_with_title_and_desc_and_challenges(self):
    """Explicit title, description and retry_after are carried through to
    the raised 413 error (Retry-After is serialized as a string)."""
    with self.assertRaises(falcon.HTTPRequestEntityTooLarge) as ctx:
        raise falcon.HTTPRequestEntityTooLarge(
            title='Test', description='Testdescription', retry_after=123)
    err = ctx.exception
    self.assertEqual('Test', err.title, 'Title should be "Test"')
    self.assertEqual('Testdescription', err.description,
                     'Description should be "Testdescription"')
    self.assertEqual('123', err.headers['Retry-After'],
                     'Retry-After should be 123')
def hook(req, resp, resouce, params):
    """Reject requests whose declared body size exceeds ``limit`` with 413.

    NOTE(review): the third parameter is misspelled (``resouce``); kept
    as-is because falcon invokes hooks positionally and renaming a
    parameter would be an interface change.
    """
    length = req.content_length
    if length is not None and length > limit:
        # Fixed wording: the original read "This is of the request is too
        # large" / "Request body is to large" and embedded stray
        # backslash-continuation whitespace inside the literal.
        msg = ('The size of the request is too large. The body must not '
               'exceed ' + str(limit) + ' bytes in length.')
        raise falcon.HTTPRequestEntityTooLarge('Request body is too large',
                                               msg)
def hook(req, resp, resource, params):
    """Drain the request body, then reject with 413 when the declared
    Content-Length exceeds ``limit``."""
    length = req.content_length
    # Consume the stream before deciding (exercises post-read rejection).
    req.stream.read()
    # The threshold was hard-coded to 500 while the message quoted
    # ``limit`` — compare against ``limit`` so check and message agree.
    if length is not None and length > limit:
        msg = ('The size of the request is too large. The body must not '
               'exceed ' + str(limit) + ' bytes in length.')
        raise falcon.HTTPRequestEntityTooLarge(
            'Request body is too large', msg)
def on_get(self, req, resp, **kwargs):
    """Multi-value search: ``?q0=a|b&q=c|d``.

    When ``q0`` is present a two-step search is performed, otherwise a
    plain multiple search over ``q``.  Results are rendered as GeoJSON;
    raises 400 when both parameters are empty.
    """
    q0 = req.get_param('q0')
    q0 = q0.split('|') if q0 and q0.strip() else []
    q = req.get_param('q')
    q = q.split('|') if q and q.strip() else []
    if not q and not q0:
        raise falcon.HTTPBadRequest('Missing query', 'Missing query')
    limit = req.get_param_as_int('limit') or 5  # use config
    autocomplete = req.get_param_as_bool('autocomplete')
    if autocomplete is None:
        # Default is True.
        # https://github.com/falconry/falcon/pull/493#discussion_r44376219
        autocomplete = True
    lon, lat = self.parse_lon_lat(req)
    center = None
    # Compare against None explicitly: 0.0 is a valid longitude/latitude
    # but is falsy, so a truthiness test would drop the center.
    if lon is not None and lat is not None:
        center = (lon, lat)
    filters = self.match_filters(req)
    try:
        if not q0:
            results = multiple_search(q, limit=limit, autocomplete=False,
                                      lat=lat, lon=lon, **filters)
            query = '|'.join(q)
        else:
            results = search2steps(self.config, q0[0], q,
                                   autocomplete=autocomplete, limit=limit,
                                   lat=lat, lon=lon, **filters)
            query = '|'.join(q0) + ' ' + '|'.join(q)
    except EntityTooLarge as e:
        raise falcon.HTTPRequestEntityTooLarge(str(e))
    if not results:
        log_notfound(query)
    log_query(query, results)
    self.to_geojson(req, resp, results, query=query, filters=filters,
                    center=center, limit=limit)
def process_row(self, req, row, filters, columns, index):
    """Geocode a single CSV row in place, adding ``result_*`` columns.

    On a search failure due to size, re-raises as HTTP 413 with the row
    number appended; rows without a match are logged as not found.
    """
    # Join the selected cells; empty cells become '' so join() never
    # sees None.
    q = ' '.join(row[name] or '' for name in columns)
    filters = self.match_row_filters(row, filters)
    lat_field = req.get_param('lat')
    lon_field = req.get_param('lon')
    if lat_field and lon_field:
        lat_value = row.get(lat_field)
        lon_value = row.get(lon_field)
        if lat_value and lon_value:
            filters['lat'] = float(lat_value)
            filters['lon'] = float(lon_value)
    try:
        results = search(q, autocomplete=False, limit=3, **filters)
    except EntityTooLarge as e:
        msg = '{} (row number {})'.format(str(e), index + 1)
        raise falcon.HTTPRequestEntityTooLarge(msg)
    log_query(q, results)
    if not results:
        log_notfound(q)
        return
    best = results[0]
    runner_up_score = round(results[1].score, 2) if len(results) > 1 else 0
    row.update({
        'latitude': best.lat,
        'longitude': best.lon,
        'result_label': str(best),
        'result_score': round(best.score, 2),
        'result_score_next': runner_up_score,
        'result_type': best.type,
        'result_id': best.id,
        'result_housenumber': best.housenumber,
    })
    self.add_extra_fields(row, best)
def save_dataset(self, req, resp, dataset_meta):
    """Multipart dataset upload.

    Validates the declared size, extracts the ``file`` field from the
    multipart form and hands its stream to the manager; responds with the
    stored dataset's metadata.

    Raises 413 when the payload is too large and 415 when the multipart
    body has no usable ``file`` field or the dataset cannot be parsed.
    """
    # content_length is None when the header is absent (e.g. chunked
    # uploads); guard so the comparison cannot raise TypeError.
    if req.content_length and req.content_length > MAX_DATASET_SIZE:
        raise falcon.HTTPRequestEntityTooLarge(
            title="Dataset is too large",
            description="Dataset size must be less than {size} bytes".
            format(size=MAX_DATASET_SIZE))
    env = req.env
    env.setdefault('QUERY_STRING', '')
    # NOTE(review): cgi is deprecated since Python 3.11 — consider moving
    # to a dedicated multipart parser.
    form = cgi.FieldStorage(fp=req.stream, environ=env)
    # Guard the key lookup too: form['file'] on a missing field raises
    # KeyError (HTTP 500) instead of the intended 415.
    if 'file' not in form or not form['file'].file:
        logger.debug('Multipart not contain file item')
        raise falcon.HTTPUnsupportedMediaType(
            description="Multipart must contain 'file' field")
    file = form['file'].file
    try:
        manager.save_dataset(dataset_meta, file)
    except OSError:
        raise falcon.HTTPUnsupportedMediaType(
            description="Can not open dataset. Invalid type.")
    except KeyError:
        raise falcon.HTTPUnsupportedMediaType(
            description="Dataset has not 'x' or 'y' keys")
    resp.status = falcon.HTTP_200
    resp.media = {
        'id': dataset_meta.id,
        'date': dataset_meta.base.date,
        'size': dataset_meta.base.size,
        'hash': dataset_meta.base.hash
    }
def validate_payload_size(req):
    """Ensure the request declares an acceptable payload size.

    The **Content-Length** header must be present; otherwise
    :py:class:`falcon.HTTPLengthRequired` is raised.  Its value is then
    compared with the configured bound::

        [service]
        max_log_size = 1048576

    and :py:class:`falcon.HTTPRequestEntityTooLarge` is raised when the
    declared size reaches or exceeds **max_log_size**.

    :param falcon.Request req: current request
    :exception: :py:class:`falcon.HTTPLengthRequired`
    :exception: :py:class:`falcon.HTTPRequestEntityTooLarge`
    """
    size = req.content_length
    limit = CONF.service.max_log_size
    LOG.debug('Payload (content-length) is %s', str(size))
    if size is None:
        raise falcon.HTTPLengthRequired(
            title='Content length header is missing',
            description='Content length is required to estimate if '
                        'payload can be processed'
        )
    # NOTE(review): a payload exactly equal to max_log_size is rejected
    # (>=) although the message calls it the "maximum allowed" size —
    # confirm the boundary is intended.
    if size >= limit:
        raise falcon.HTTPRequestEntityTooLarge(
            title='Log payload size exceeded',
            description='Maximum allowed size is %d bytes' % limit
        )
def on_get(self, req, resp):
    """Unconditionally refuse the request with 413, advertising the
    configured Retry-After delay."""
    title = 'Request Rejected'
    description = 'Request Body Too Large'
    raise falcon.HTTPRequestEntityTooLarge(
        title, description, retry_after=self.retry_after)
def on_get(self, req, resp, **kwargs):
    """Search endpoint with language-aware ranking and deduplication.

    Results are sorted by (requested language first, earliest literal
    match of the query, shortest name) and deduplicated by lowercased
    name — except relation-typed results ('R*'), deduplicated by id.
    """
    query = req.get_param('q')
    language = req.get_param('language') or 'zh'
    if not query:
        raise falcon.HTTPBadRequest('Missing query', 'Missing query')
    limit = req.get_param_as_int('limit') or 20  # use config
    autocomplete = req.get_param_as_bool('autocomplete')
    if autocomplete is None:
        # Default is True.
        # https://github.com/falconry/falcon/pull/493#discussion_r44376219
        autocomplete = True
    lon, lat = self.parse_lon_lat(req)
    center = None
    # Compare against None explicitly: 0.0 is a valid longitude/latitude
    # but is falsy, so a truthiness test would drop the center.
    if lon is not None and lat is not None:
        center = (lon, lat)
    filters = self.match_filters(req)
    timer = time.perf_counter()
    try:
        results = search(query, limit=limit, autocomplete=autocomplete,
                         lat=lat, lon=lon, **filters)
    except EntityTooLarge as e:
        raise falcon.HTTPRequestEntityTooLarge(str(e))
    timer = int((time.perf_counter() - timer) * 1000)  # elapsed ms
    if not results:
        log_notfound(query)
    log_query(query, results)
    if config.SLOW_QUERIES and timer > config.SLOW_QUERIES:
        log_slow_query(query, results, timer)
    filtered_results = {}
    query_lower = query.lower()  # hoisted out of the sort key

    def sortbyindex(item):
        # Earlier literal match of the query wins; no match sinks (100).
        idx = item.name.lower().find(query_lower)
        return 100 if idx == -1 else idx

    def sortbylang(item):
        # Results in the requested language come first.
        return 0 if item.lang == language else 1

    def sortbylength(item):
        return len(item.name)

    results.sort(
        key=lambda x: (sortbylang(x), sortbyindex(x), sortbylength(x)))
    count = 0
    for r in results:
        if not filtered_results.get(r.name.lower()) and r.type[0] != 'R':
            filtered_results[r.name.lower()] = r
            count += 1
        elif not filtered_results.get(r.id) and r.type[0] == 'R':
            filtered_results[r.id] = r
            count += 1
        # Without an explicit ?limit=, cap the response at 5 entries.
        if not req.get_param_as_int('limit') and count == 5:
            break
    self.render(req, resp, list(filtered_results.values()), query=query,
                filters=filters, center=center, limit=limit)