def test_parse_request_where(self):
    """`where` is None without a query string and echoes the raw value."""
    resource_config = self.app.config['DOMAIN'][self.known_resource]
    resource_config['allowed_filters'] = ['ref']
    with self.app.test_request_context():
        self.assertEqual(parse_request(self.known_resource).where, None)
    with self.app.test_request_context('/?where=hello'):
        self.assertEqual(parse_request(self.known_resource).where, 'hello')
def test_parse_request_if_match(self):
    """`if_match` mirrors the If-Match header; absent header yields None."""
    self.assertEqual(parse_request().if_match, None)
    self.assertEqual(parse_request(headers=None).if_match, None)
    headers = {'If-Match': self.etag}
    self.assertEqual(parse_request(headers=headers).if_match, self.etag)
def like(product_id):
    """Record a like/dislike for a product and fetch matching offers.

    Looks up the product and its category, stores a 'like' document for the
    current session user, then searches MercadoLibre for items in the same
    category and stores each result as an 'offer'.

    :param product_id: id of the product being (dis)liked.
    :returns: empty string (response body).
    """
    product = current_app.data.find("product", parse_request('product'), {'_id': product_id})[0]
    category = current_app.data.find("category", parse_request('category'), {'_id': product['category']})[0]
    # ?value=true/false toggles like vs dislike; defaults to a like
    current_app.data.insert(
        'like', {
            'does_like': request.args.get('value', 'true').lower() == 'true',
            'user': session['user_id'],
            'product': product_id,
            'viewed': False
        })
    # search MercadoLibre for comparable items in the product's category
    items = json.loads(
        meli.get("/sites/MLA/search/?category=%s&q=%s" %
                 (category['meli_id'], product['description'])).content)
    for item in items['results']:
        offer = {
            'meli_seller': item['seller']['id'],
            'meli_item_id': item['id'],
            'meli_link': item['permalink'],
            'meli_image': item['thumbnail'],
            'product': product_id,
            'original_price': item['price'],
            'new_price': None
        }
        current_app.data.insert('offer', offer)
    return ""
def test_parse_request_if_match(self):
    """if_match defaults to None and reflects the If-Match header."""
    for ctx_kwargs in ({}, {'headers': None}):
        with self.app.test_request_context(**ctx_kwargs):
            self.assertEqual(parse_request(self.known_resource).if_match, None)
    with self.app.test_request_context(headers={"If-Match": self.etag}):
        self.assertEqual(parse_request(self.known_resource).if_match, self.etag)
def update_amazon_products():
    """Sync Amazon products into the local store, one search per category.

    For each stored category, searches the Amazon API by keyword and inserts
    any product (keyed by ASIN) not already present.

    NOTE(review): uses the `unicode` builtin, so this is Python 2 code.

    :returns: empty string (response body).
    """
    categories = current_app.data.find('category', parse_request('category'), {})
    app.logger.debug('Categories {}'.format(categories))
    for category in categories:
        app.logger.debug('Category {}'.format(category))
        for product in amazon_api.item_search(category['amazon_name'], Keywords=category['name']):
            app.logger.debug('Product ASIN {}'.format(product.ASIN))
            # skip products already present (matched by ASIN)
            exists = current_app.data.find('product', parse_request('product'), {
                'product_id': str(product.ASIN)
            }).count() > 0
            if not exists:
                try:
                    image_url = amazon_api.item_lookup(
                        str(product.ASIN),
                        ResponseGroup='Images').Items.Item.LargeImage.URL
                    product_entry = {
                        'product_id': str(product.ASIN),
                        'image_url': str(image_url),
                        'description': unicode(product.ItemAttributes.Title),
                        'category': category['_id']
                    }
                    app.logger.debug('Entry {}'.format(product_entry))
                    current_app.data.insert('product', product_entry)
                except AttributeError:
                    # item lookup may lack LargeImage or title attributes;
                    # skip such products
                    continue
    return ''
def test_parse_request_where(self):
    """Parsed `where` is None with no query and matches the raw query value."""
    domain_cfg = self.app.config["DOMAIN"][self.known_resource]
    domain_cfg["allowed_filters"] = ["ref"]
    with self.app.test_request_context():
        self.assertEqual(parse_request(self.known_resource).where, None)
    with self.app.test_request_context("/?where=hello"):
        self.assertEqual(parse_request(self.known_resource).where, "hello")
def test_parse_request_if_match(self):
    """The parsed request exposes the If-Match header as `if_match`."""
    with self.app.test_request_context():
        parsed = parse_request(self.known_resource)
        self.assertEqual(parsed.if_match, None)
    with self.app.test_request_context(headers=None):
        parsed = parse_request(self.known_resource)
        self.assertEqual(parsed.if_match, None)
    with self.app.test_request_context(headers={'If-Match': self.etag}):
        parsed = parse_request(self.known_resource)
        self.assertEqual(parsed.if_match, self.etag)
def test_parse_request_if_modified_since(self):
    """If-Modified-Since: absent -> None, valid -> parsed + 1s, bad -> ValueError."""
    ims = "If-Modified-Since"
    with self.app.test_request_context():
        self.assertEqual(parse_request(self.known_resource).if_modified_since, None)
    with self.app.test_request_context(headers=None):
        self.assertEqual(parse_request(self.known_resource).if_modified_since, None)
    with self.app.test_request_context(headers={ims: self.datestr}):
        # the parsed value is bumped by one second
        self.assertEqual(parse_request(self.known_resource).if_modified_since,
                         self.valid + timedelta(seconds=1))
    with self.app.test_request_context(headers={ims: "not-a-date"}):
        self.assertRaises(ValueError, parse_request, self.known_resource)
    with self.app.test_request_context(headers={ims: self.datestr.replace("GMT", "UTC")}):
        self.assertRaises(ValueError, parse_request, self.known_resource)
        # NOTE(review): duplicated assertion in the original source —
        # likely redundant; confirm before removing.
        self.assertRaises(ValueError, parse_request, self.known_resource)
def test_parse_request_if_modified_since(self):
    """If-Modified-Since parsing via the legacy parse_request(headers=...) API."""
    ims = 'If-Modified-Since'
    self.assertEqual(parse_request().if_modified_since, None)
    self.assertEqual(parse_request(headers=None).if_modified_since, None)
    # a valid date is bumped by one second by the parser
    self.assertEqual(parse_request(
        headers={ims: self.datestr}).if_modified_since,
        self.valid + timedelta(seconds=1))
    self.assertRaises(ValueError, parse_request,
                      headers={ims: 'not-a-date'})
    # NOTE(review): assumes self.datestr contains 'UTC'; if the fixture is
    # GMT-formatted this replace is a no-op — confirm against the fixture.
    self.assertRaises(ValueError, parse_request,
                      headers={ims: self.datestr.replace('UTC', 'GMT')})
def suggest_products():
    """Return all products (id fields stringified) in random order as JSON."""
    user = current_app.data.find('user', parse_request('user'),
                                 {'_id': session['user_id']})[0]
    suggestions = []
    for product in current_app.data.find('product', parse_request('product'), {}):
        # if str(product['category']) in [str(p) for p in user['preferences']]:
        product['_id'] = str(product['_id'])
        product['category'] = str(product['category'])
        suggestions.append(product)
    random.shuffle(suggestions)
    return json.dumps(suggestions)
def test_phrase_search_query(self):
    """Quoted queries must match the exact phrase, word order included."""
    with self.app.app_context():
        fixtures = [{'uri': 'foo bar'}, {'uri': 'some text'}]
        self.app.data.insert('items', fixtures)
    with self.app.test_request_context('/items/?q="foo bar"'):
        found = self.app.data.find('items', parse_request('items'), None)
        self.assertEquals(1, found.count())
    with self.app.test_request_context('/items/?q="bar foo"'):
        found = self.app.data.find('items', parse_request('items'), None)
        self.assertEquals(0, found.count())
def get(resource):
    """Retrieves the resource documents that match the current request.

    :param resource: the name of the resource.

    Returns a ``(response, last_modified, etag, status)`` tuple; status is
    304 for an if-modified-since request with no newer documents.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       Superflous ``response`` container removed. Collection items wrapped
       with ``_items``. Links wrapped with ``_links``. Links are now properly
       JSON formatted.
    """
    documents = []
    response = {}
    last_updated = datetime.min
    req = parse_request()
    cursor = app.data.find(resource, req)
    for document in cursor:
        # flask-pymongo returns timezone-aware value, we strip it out
        # because std lib datetime doesn't provide that, and comparisions
        # between the two values would fail
        # TODO consider testing if the app.data is of type Mongo before
        # replacing the tzinfo. On the other hand this could be handy for
        # other drivers as well (think of it as a safety measure). A
        # 'pythonic' alternative would be to perform the comparision in a
        # try..catch statement.. performing the replace in case of an
        # exception. However that would mean getting the exception at each
        # execution under standard circumstances (the default driver being
        # Mongo).
        document[config.LAST_UPDATED] = \
            document[config.LAST_UPDATED].replace(tzinfo=None)
        # track the most recent update across the whole result set
        if document[config.LAST_UPDATED] > last_updated:
            last_updated = document[config.LAST_UPDATED]
        # document metadata
        document['etag'] = document_etag(document)
        document['_links'] = {'self': document_link(resource,
                                                    document[config.ID_FIELD])}
        documents.append(document)
    if req.if_modified_since and len(documents) == 0:
        # the if-modified-since conditional request returned no documents, we
        # send back a 304 Not-Modified, which means that the client already
        # has the up-to-date representation of the resultset.
        status = 304
        last_modified = None
    else:
        status = 200
        # no Last-Modified header when the collection is empty
        last_modified = last_updated if last_updated > datetime.min else None
    response['_items'] = documents
    response['_links'] = _pagination_links(resource, req, cursor.count())
    etag = None
    return response, last_modified, etag, status
def _perform_find(resource, lookup):
    """Fetch and serialize the documents matching the current GET request.

    :param resource: the name of the resource being fetched.
    :param lookup: base lookup/filter dict applied to the find.

    Returns a ``(response, last_modified, etag, status, headers)`` tuple.

    .. versionadded:: 0.7
    """
    documents = []
    response = {}
    etag = None
    req = parse_request(resource)
    embedded_fields = resolve_embedded_fields(resource, req)
    # continue processing the full request
    last_update = epoch()
    # If-Modified-Since disabled on collections (#334)
    req.if_modified_since = None
    cursor = app.data.find(resource, req, lookup)
    # If soft delete is enabled, data.find will not include items marked
    # deleted unless req.show_deleted is True
    for document in cursor:
        build_response_document(document, resource, embedded_fields)
        documents.append(document)
        # build last update for entire response
        if document[config.LAST_UPDATED] > last_update:
            last_update = document[config.LAST_UPDATED]
    status = 200
    headers = []
    # no Last-Modified header when nothing was ever updated
    last_modified = last_update if last_update > epoch() else None
    response[config.ITEMS] = documents
    if config.OPTIMIZE_PAGINATION_FOR_SPEED:
        # skip the potentially expensive total-count query
        count = None
    else:
        count = cursor.count(with_limit_and_skip=False)
        headers.append((config.HEADER_TOTAL_COUNT, count))
    if config.DOMAIN[resource]['hateoas']:
        response[config.LINKS] = _pagination_links(resource, req, count)
    # add pagination info
    if config.DOMAIN[resource]['pagination']:
        response[config.META] = _meta_links(req, count)
    # notify registered callback functions. Please note that, should the
    # functions modify the documents, the last_modified and etag won't be
    # updated to reflect the changes (they always reflect the documents
    # state on the database.)
    getattr(app, "on_fetched_resource")(resource, response)
    getattr(app, "on_fetched_resource_%s" % resource)(response)
    # the 'extra' cursor field, if present, will be added to the response.
    # Can be used by Eve extensions to add extra, custom data to any
    # response.
    if hasattr(cursor, 'extra'):
        getattr(cursor, 'extra')(response)
    return response, last_modified, etag, status, headers
def _get_lookup():
    """Get the where clause lookup just like Eve does.

    Unfortunately, Eve only parses the `where` at a very low level and does
    not provide any methods to elegantly access it, so we have use the same
    internal functions as Eve does. (Recently, Eve has at least somewhat
    exposed the parsing, but this code is not part of an official release
    yet [1]) As soon as there is some 'official' support, this can be
    removed, as it is basically copied, with the abort removed for
    simplicity (as Eve itself will already abort if there's an error).

    [1]: https://github.com/pyeve/eve/blob/master/eve/io/mongo/mongo.py
    """
    req = parse_request('studydocuments')
    if not (req and req.where):
        # no where clause
        return {}
    try:
        # Mongo Syntax
        return current_app.data._sanitize('studydocuments',
                                          json.loads(req.where))
    except (HTTPException, json.JSONDecodeError):
        # Python Syntax
        return parse(req.where)
def authorize():
    """OAuth callback: exchange the MercadoLibre code for tokens, ensure a
    matching local user exists, and stash ids in the session.

    :returns: a redirect to the application page.
    """
    if request.method == 'GET':
        meli.authorize(code=request.args.get('code', ''),
                       redirect_URI=REDIRECT_URI)
        session['access_token'] = meli.access_token
        session['refresh_token'] = meli.refresh_token
        # fetch the authenticated user's profile from MercadoLibre
        meli_user = json.loads(
            meli.get("/users/me", {
                'access_token': session['access_token']
            }).content)
        session['meli_email'] = meli_user['email']
        session['meli_id'] = meli_user['id']
        exists = current_app.data.find('user', parse_request('user'),
                                       {"meli_id": meli_user['id']})
        if exists.count() == 0:
            # first login: create a local user record
            user_id = str(
                current_app.data.insert(
                    'user', {
                        'meli_id': meli_user['id'],
                        'email': meli_user['email'],
                        'preferences': []
                    }))
        else:
            user_id = exists[0]['_id']
        session['user_id'] = str(user_id)
    return redirect("/app")
def test_elastic_find_default_sort_no_mapping(self):
    """Without an index mapping, a default-sorted find returns nothing."""
    self.app.data.es.indices.delete_mapping(INDEX, '_all')
    with self.app.test_request_context('/items/'):
        parsed = parse_request('items')
        parsed.args = {}
        results = self.app.data.find('items', parsed, None)
        self.assertEqual(0, results.count())
def getitem(resource, **lookup):
    """ Retrieves and returns a single document.

    :param resource: the name of the resource to which the document belongs.
    :param **lookup: the lookup query.

    Returns a ``(response, last_modified, etag, status)`` tuple (status 304
    on a matching conditional request), or aborts with 404 when no document
    matches the lookup.
    """
    response = dict()
    req = parse_request()
    document = app.data.find_one(resource, **lookup)
    if document:
        # need to update the document field as well since the etag must
        # be computed on the same document representation that might have
        # been used in the collection 'get' method
        last_modified = document[config.LAST_UPDATED] = \
            document[config.LAST_UPDATED].replace(tzinfo=None)
        document['etag'] = document_etag(document)
        if req.if_none_match and document['etag'] == req.if_none_match:
            # request etag matches the current server representation of the
            # document, return a 304 Not-Modified.
            return response, last_modified, document['etag'], 304
        if req.if_modified_since and last_modified <= req.if_modified_since:
            # request If-Modified-Since conditional request match. We test
            # this after the etag since Last-Modified dates have lower
            # resolution (1 second).
            return response, last_modified, document['etag'], 304
        document['link'] = document_link(resource, document[config.ID_FIELD])
        response[resource] = document
        response['links'] = standard_links(resource)
        return response, last_modified, document['etag'], 200
    abort(404)
def resolve_additional_embedded_documents(document, resource, embedded_fields):
    """Resolve the request's `embedded` clause on a single document,
    replacing references with the referenced documents (mutates in place).

    :param document: the document being processed (mutated in place).
    :param resource: the name of the resource the document belongs to.
    :param embedded_fields: fields already resolved for the resource.
    """
    req = parse_request(resource)
    try:
        if req.embedded:
            extra_embedded = json.loads(req.embedded)
        else:
            extra_embedded = {}
    except ValueError:
        abort(400, description='Unable to parse `embedded` clause')
    extra_embedded = extra_embedded.keys()
    for field in new_resolve_embedded_fields(resource, req, document):
        # collect nested embedded specs ('field.sub') relative to this field,
        # stripping the 'field.' prefix
        field_extra_name = field + '.'
        field_extra_embedded = filter(lambda x: x.startswith(field_extra_name),
                                      extra_embedded)
        field_extra_embedded = map(
            lambda x: x.replace(field_extra_name, '', 1),
            field_extra_embedded)
        field_extra_embedded = list(field_extra_embedded)
        data_relation = new_field_definition(resource, field,
                                             document)['data_relation']
        # resolves a single reference to its embedded document
        getter = lambda ref: embedded_document(ref, data_relation, field,
                                               field_extra_embedded)
        fields_chain = field.split('.')
        last_field = fields_chain[-1]
        for subdocument in common.subdocuments(fields_chain[:-1],
                                               document=document,
                                               resource=resource):
            if last_field not in subdocument:
                continue
            if isinstance(subdocument[last_field], list):
                embedded_value = list(map(getter, subdocument[last_field]))
                # drop references that could not be resolved
                embedded_value = [x for x in embedded_value if x is not None]
                subdocument[last_field] = embedded_value
            else:
                embedded_value = getter(subdocument[last_field])
                subdocument[last_field] = embedded_value
def aggregate(endpoint, base_pipeline, app, post_processing=None):
    """Run an aggregation pipeline for `endpoint` and return a JSON response.

    :param endpoint: collection/endpoint name to aggregate over.
    :param base_pipeline: the raw pipeline, resolved via `queries.resolve_pipeline`.
    :param app: the Flask application (used for config and logging).
    :param post_processing: optional iterable of callables applied to the
        result list in order.
    :returns: a Flask JSON response with items, meta and (when paginating)
        pagination links.
    """
    before = datetime.now()
    collection = db[endpoint]
    pipeline = queries.resolve_pipeline(endpoint, base_pipeline)
    app.logger.debug(pipeline)
    data = list(collection.aggregate(pipeline))
    total_items = len(data)
    if post_processing:
        for p in post_processing:
            data = p(data)
    ret_dict = {}
    page_number = int(request.args.get(app.config['QUERY_PAGE'], '0'))
    page_size = int(request.args.get(app.config['QUERY_MAX_RESULTS'], '0'))
    if page_number and page_size:
        # slice the already-materialized result set for the requested page
        data = data[page_size * (page_number - 1): page_size * page_number]
        req = parse_request(endpoint)
        # BUG FIX: a trailing comma previously wrapped the meta links in a
        # 1-tuple; assign the meta dict itself.
        ret_dict[app.config['META']] = _meta_links(req, total_items)
        ret_dict[app.config['LINKS']] = _pagination_links(endpoint, req, total_items)
    else:
        ret_dict[app.config['META']] = {'total': total_items}
    ret_dict[app.config['ITEMS']] = data
    j = jsonify(loads(dumps(ret_dict)))  # cast ObjectIDs to dicts
    after = datetime.now()
    app.logger.info('Aggregated from %s in %s', endpoint, after - before)
    return j
def _perform_aggregation(resource, pipeline, options): """ .. versionadded:: 0.7 """ # TODO move most of this down to the Mongo layer? # TODO experiment with cursor.batch_size as alternative pagination # implementation def parse_aggregation_stage(d, key, value): is_collection = lambda item: isinstance(item, (list, dict)) if isinstance(d, dict): for st_key, st_value in d.items(): if key == st_value: d[st_key] = value elif is_collection(st_value): parse_aggregation_stage(st_value, key, value) elif isinstance(d, list): for st_idx, st_value in enumerate(d): if key == st_value: d[st_idx] = value elif is_collection(st_value): parse_aggregation_stage(st_value, key, value) response = {} documents = [] req = parse_request(resource) req_pipeline = copy.deepcopy(pipeline) if req.aggregation: try: query = json.loads(req.aggregation) except ValueError: abort(400, description='Aggregation query could not be parsed.') for key, value in query.items(): if key[0] != '$': pass for stage in req_pipeline: parse_aggregation_stage(stage, key, value) if req.max_results > 1: limit = {"$limit": req.max_results} skip = {"$skip": (req.page - 1) * req.max_results} req_pipeline.append(skip) req_pipeline.append(limit) cursor = app.data.aggregate(resource, req_pipeline, options) for document in cursor: documents.append(document) response[config.ITEMS] = documents # PyMongo's CommandCursor does not return a count, so we cannot # provide pagination/total count info as we do with a normal # (non-aggregate) GET request. return response, None, None, 200, []
def _get_lookup():
    """Get the where clause lookup just like Eve does.

    Unfortunately, Eve only parses the `where` at a very low level and does
    not provide any methods to elegantly access it, so we have use the same
    internal functions as Eve does. (Recently, Eve has at least somewhat
    exposed the parsing, but this code is not part of an official release
    yet [1]) As soon as there is some 'official' support, this can be
    removed, as it is basically copied, with the abort removed for
    simplicity (as Eve itself will already abort if there's an error).

    [1]: https://github.com/pyeve/eve/blob/master/eve/io/mongo/mongo.py

    :returns: a lookup dict; empty when the request has no `where` clause.
    """
    req = parse_request('studydocuments')
    if req and req.where:
        try:
            # Mongo Syntax
            return current_app.data._sanitize(json.loads(req.where))
        except (HTTPException, json.JSONDecodeError):
            # Python Syntax
            return parse(req.where)
    return {}  # No where clause
def get_document(resource, concurrency_check, **lookup):
    """ Retrieves and return a single document. Since this function is used by
    the editing methods (PUT, PATCH, DELETE), we make sure that the client
    request references the current representation of the document before
    returning it. However, this concurrency control may be turned off by
    internal functions. If resource enables soft delete, soft deleted
    documents will be returned, and must be handled by callers.

    :param resource: the name of the resource to which the document belongs to.
    :param concurrency_check: boolean check for concurrency control
    :param **lookup: document lookup query

    .. versionchanged:: 0.6
       Return soft deleted documents.

    .. versionchanged:: 0.5
       Concurrency control optional for internal functions.
       ETAG are now stored with the document (#369).

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.5
       Pass current resource to ``parse_request``, allowing for proper
       processing of new configuration settings: `filters`, `sorting`,
       `paging`.
    """
    req = parse_request(resource)
    if config.DOMAIN[resource]['soft_delete']:
        # get_document should always fetch soft deleted documents from the db
        # callers must handle soft deleted documents
        req.show_deleted = True
    document = app.data.find_one(resource, req, **lookup)
    if document:
        e_if_m = config.ENFORCE_IF_MATCH
        if_m = config.IF_MATCH
        if not req.if_match and e_if_m and if_m and concurrency_check:
            # we don't allow editing unless the client provides an etag
            # for the document or explicitly decides to allow editing by either
            # disabling the ``concurrency_check`` or ``IF_MATCH`` or
            # ``ENFORCE_IF_MATCH`` fields.
            abort(428, description='To edit a document '
                  'its etag must be provided using the If-Match header')
        # ensure the retrieved document has LAST_UPDATED and DATE_CREATED,
        # eventually with same default values as in GET.
        document[config.LAST_UPDATED] = last_updated(document)
        document[config.DATE_CREATED] = date_created(document)
        if req.if_match and concurrency_check:
            # stored etag wins; fall back to recomputing it
            ignore_fields = config.DOMAIN[resource]['etag_ignore_fields']
            etag = document.get(config.ETAG,
                                document_etag(document,
                                              ignore_fields=ignore_fields))
            if req.if_match != etag:
                # client and server etags must match, or we don't allow editing
                # (ensures that client's version of the document is up to date)
                abort(412, description='Client and server etags don\'t match')
    return document
def test_parse_request_if_modified_since(self):
    """If-Modified-Since: missing -> None, valid -> +1s, malformed -> ValueError."""
    ims = 'If-Modified-Since'
    for ctx_kwargs in ({}, {'headers': None}):
        with self.app.test_request_context(**ctx_kwargs):
            self.assertEqual(
                parse_request(self.known_resource).if_modified_since, None)
    with self.app.test_request_context(headers={ims: self.datestr}):
        expected = self.valid + timedelta(seconds=1)
        self.assertEqual(
            parse_request(self.known_resource).if_modified_since, expected)
    for bad_value in ('not-a-date', self.datestr.replace('UTC', 'GMT')):
        with self.app.test_request_context(headers={ims: bad_value}):
            self.assertRaises(ValueError, parse_request, self.known_resource)
def find_one(self, req, **lookup):
    """Fetch a session document enhanced with the user's default preferences.

    :param req: parsed request, or None for internal calls (in which case
        the If-Match etag is recovered from the current 'auth' request).
    :param lookup: session document lookup query.
    :returns: the session document with ``_etag`` set from If-Match.
    """
    session_doc = super().find_one(req, **lookup)
    # NOTE(review): assumes a matching session exists; a None result would
    # raise on the ['user'] access — confirm callers guarantee existence.
    user_doc = get_resource_service('users').find_one(req=None, _id=session_doc['user'])
    self.enhance_document_with_default_prefs(session_doc, user_doc)
    if req is None:
        # internal call: parse the live request to get the If-Match header
        req = parse_request('auth')
    session_doc['_etag'] = req.if_match
    return session_doc
def test_basic_search_query(self):
    """A bare ?q= term matches exactly the documents containing it."""
    with self.app.app_context():
        self.app.data.insert('items', [{'uri': u} for u in ('foo', 'bar')])
    with self.app.test_request_context('/items/?q=foo'):
        parsed = parse_request('items')
        hits = self.app.data.find('items', parsed, None)
        self.assertEquals(1, hits.count())
def test_phrase_search_query(self):
    """A quoted phrase only matches documents with the words in order."""
    with self.app.app_context():
        self.app.data.insert('items', [{'uri': 'foo bar'}, {'uri': 'some text'}])
    expectations = [('/items/?q="foo bar"', 1), ('/items/?q="bar foo"', 0)]
    for url, expected_count in expectations:
        with self.app.test_request_context(url):
            parsed = parse_request('items')
            cursor = self.app.data.find('items', parsed, None)
            self.assertEquals(expected_count, cursor.count())
def getitem(resource, **lookup):
    """ Retrieves and returns a single document.

    :param resource: the name of the resource to which the document belongs.
    :param **lookup: the lookup query.

    Returns a ``(response, last_modified, etag, status)`` tuple (status 304
    on a matching conditional request), or aborts with 404.

    .. versionchanged:: 0.0.7
       Support for Rate-Limiting.

    .. versionchanged:: 0.0.6
       Support for HEAD requests.

    .. versionchanged:: 0.0.6
       ETag added to payload.

    .. versionchanged:: 0.0.5
       Support for user-restricted access to resources.
       Support for LAST_UPDATED field missing from documents, because they
       were created outside the API context.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       Superflous ``response`` container removed. Links wrapped with
       ``_links``. Links are now properly JSON formatted.
    """
    response = {}
    req = parse_request(resource)
    document = app.data.find_one(resource, **lookup)
    if document:
        # need to update the document field as well since the etag must
        # be computed on the same document representation that might have
        # been used in the collection 'get' method
        last_modified = document[config.LAST_UPDATED] = _last_updated(document)
        document["etag"] = document_etag(document)
        if req.if_none_match and document["etag"] == req.if_none_match:
            # request etag matches the current server representation of the
            # document, return a 304 Not-Modified.
            return response, last_modified, document["etag"], 304
        if req.if_modified_since and last_modified <= req.if_modified_since:
            # request If-Modified-Since conditional request match. We test
            # this after the etag since Last-Modified dates have lower
            # resolution (1 second).
            return response, last_modified, document["etag"], 304
        # HATEOAS links for the returned document
        response["_links"] = {
            "self": document_link(resource, document[config.ID_FIELD]),
            "collection": collection_link(resource),
            "parent": home_link(),
        }
        response.update(document)
        return response, last_modified, document["etag"], 200
    abort(404)
def find_one(self, req, **lookup):
    """Return the session document with default preferences attached."""
    doc = super().find_one(req, **lookup)
    owner = get_resource_service('users').find_one(req=None, _id=doc['user'])
    self.enhance_document_with_default_prefs(doc, owner)
    if req is None:
        # internal call: recover the If-Match etag from the live request
        req = parse_request('auth')
    doc['_etag'] = req.if_match
    return doc
def me():
    """Return the logged-in user's profile (id, email, preferences) as JSON."""
    lookup = {'_id': session['user_id']}
    user = current_app.data.find('user', parse_request('user'), lookup)[0]
    profile = {
        'id': session['meli_id'],
        'email': session['meli_email'],
        'preferences': user['preferences'],
    }
    return json.dumps(profile)
def get(resource):
    """Retrieves the resource documents that match the current request.

    :param resource: the name of the resource.

    Returns a ``(response, last_modified, etag, status)`` tuple; status is
    304 for an if-modified-since request with no newer documents.

    .. versionchanged:: 0.0.6
       Support for HEAD requests.

    .. versionchanged:: 0.0.5
       Support for user-restricted access to resources.
       Support for LAST_UPDATED field missing from documents, because they
       were created outside the API context.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       Superflous ``response`` container removed. Collection items wrapped
       with ``_items``. Links wrapped with ``_links``. Links are now properly
       JSON formatted.
    """
    documents = []
    response = {}
    last_updated = _epoch()
    req = parse_request(resource)
    cursor = app.data.find(resource, req)
    for document in cursor:
        # normalize timestamps, providing defaults for documents created
        # outside the API context
        document[config.LAST_UPDATED] = _last_updated(document)
        document[config.DATE_CREATED] = _date_created(document)
        # track the most recent update across the result set
        if document[config.LAST_UPDATED] > last_updated:
            last_updated = document[config.LAST_UPDATED]
        # document metadata
        document['etag'] = document_etag(document)
        document['_links'] = {
            'self': document_link(resource, document[config.ID_FIELD])
        }
        documents.append(document)
    if req.if_modified_since and len(documents) == 0:
        # the if-modified-since conditional request returned no documents, we
        # send back a 304 Not-Modified, which means that the client already
        # has the up-to-date representation of the resultset.
        status = 304
        last_modified = None
    else:
        status = 200
        # no Last-Modified header when the collection is empty
        last_modified = last_updated if last_updated > _epoch() else None
    response['_items'] = documents
    response['_links'] = _pagination_links(resource, req, cursor.count())
    etag = None
    return response, last_modified, etag, status
def create(self, docs, **kwargs):
    """Autosave the first document in `docs` for the current user.

    :param docs: list with the document to autosave (must carry `_id`).
    :returns: single-item list with the autosaved document's id.
    :raises SuperdeskApiError: 404 when `docs` is empty, 412 on an etag
        mismatch, 400 when the document has no `_id`.
    """
    if not docs:
        raise SuperdeskApiError.notFoundError('Content is missing')
    req = parse_request(self.datasource)
    try:
        get_component(ItemAutosave).autosave(docs[0]['_id'], docs[0],
                                             get_user(required=True),
                                             req.if_match)
    except InvalidEtag:
        raise SuperdeskApiError.preconditionFailedError('Client and server etags don\'t match')
    except KeyError:
        # consistency fix: the sibling create() implementation maps a
        # missing '_id' to a 400 instead of letting KeyError escape
        raise SuperdeskApiError.badRequestError('Request for Auto-save must have _id')
    return [docs[0]['_id']]
def on_create(self, docs):
    """Autosave the incoming document; translate etag mismatches into 412s."""
    if not docs:
        raise SuperdeskError('Content is missing', 400)
    req = parse_request(self.endpoint_name)
    autosaver = get_component(ItemAutosave)
    try:
        doc = docs[0]
        autosaver.autosave(doc['_id'], doc, get_user(required=True), req.if_match)
    except InvalidEtag:
        raise SuperdeskError('Client and server etags don\'t match', 412)
def get(resource):
    """Retrieves the resource documents that match the current request.

    :param resource: the name of the resource.

    Returns a ``(response, last_modified, etag, status)`` tuple; status is
    304 for an if-modified-since request with no newer documents.

    .. versionchanged:: 0.0.6
       Support for HEAD requests.

    .. versionchanged:: 0.0.5
       Support for user-restricted access to resources.
       Support for LAST_UPDATED field missing from documents, because they
       were created outside the API context.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       Superflous ``response`` container removed. Collection items wrapped
       with ``_items``. Links wrapped with ``_links``. Links are now properly
       JSON formatted.
    """
    documents = []
    response = {}
    last_updated = _epoch()
    req = parse_request(resource)
    cursor = app.data.find(resource, req)
    for document in cursor:
        # normalize timestamps, providing defaults for documents created
        # outside the API context
        document[config.LAST_UPDATED] = _last_updated(document)
        document[config.DATE_CREATED] = _date_created(document)
        # track the most recent update across the result set
        if document[config.LAST_UPDATED] > last_updated:
            last_updated = document[config.LAST_UPDATED]
        # document metadata
        document['etag'] = document_etag(document)
        document['_links'] = {'self': document_link(resource,
                                                    document[config.ID_FIELD])}
        documents.append(document)
    if req.if_modified_since and len(documents) == 0:
        # the if-modified-since conditional request returned no documents, we
        # send back a 304 Not-Modified, which means that the client already
        # has the up-to-date representation of the resultset.
        status = 304
        last_modified = None
    else:
        status = 200
        # no Last-Modified header when the collection is empty
        last_modified = last_updated if last_updated > _epoch() else None
    response['_items'] = documents
    response['_links'] = _pagination_links(resource, req, cursor.count())
    etag = None
    return response, last_modified, etag, status
def test_parse_request_if_modified_since(self):
    """If-Modified-Since: absent -> None, valid -> parsed + 1s, bad -> ValueError."""
    ims = "If-Modified-Since"
    with self.app.test_request_context():
        self.assertEqual(
            parse_request(self.known_resource).if_modified_since, None)
    with self.app.test_request_context(headers=None):
        self.assertEqual(
            parse_request(self.known_resource).if_modified_since, None)
    with self.app.test_request_context(headers={ims: self.datestr}):
        # the parsed value is bumped by one second
        self.assertEqual(
            parse_request(self.known_resource).if_modified_since,
            self.valid + timedelta(seconds=1),
        )
    with self.app.test_request_context(headers={ims: "not-a-date"}):
        self.assertRaises(ValueError, parse_request, self.known_resource)
    with self.app.test_request_context(
            headers={ims: self.datestr.replace("GMT", "UTC")}):
        self.assertRaises(ValueError, parse_request, self.known_resource)
        # NOTE(review): duplicated assertion in the original source —
        # likely redundant; confirm before removing.
        self.assertRaises(ValueError, parse_request, self.known_resource)
def get_offers():
    """For each product the current user likes, return the cheapest offer,
    annotated with the average price of that product's offers and the
    resulting percent discount.

    :returns: JSON list of cheapest-offer documents (id fields stringified).
    """
    res = []
    likes = current_app.data.find('like', parse_request('like'), {})
    for l in likes:
        if session['user_id'] != str(l['user']):
            continue
        offers = current_app.data.find('offer', parse_request('offer'), {})
        os = [o for o in offers if str(o['product']) == str(l['product'])]
        if not os:
            # no offers for this liked product — skip instead of crashing
            # on os[0] below
            continue
        # BUG FIX: average the matching offers only — previously the sum of
        # matching prices was divided by the count of ALL offers, which
        # understated the average (and the discount).
        avg = sum(o['original_price'] for o in os) / (1.0 * len(os))
        least = min(os, key=lambda x: x['original_price'])
        least['avg'] = avg
        least['percent_discount'] = 100 * max(
            (avg - least['original_price']) / (1.0 * avg), 0)
        least['product'] = str(least['product'])
        least['_id'] = str(least['_id'])
        res.append(least)
    return json.dumps(res)
def get_document(resource, concurrency_check, **lookup):
    """ Retrieves and return a single document. Since this function is used by
    the editing methods (PUT, PATCH, DELETE), we make sure that the client
    request references the current representation of the document before
    returning it. However, this concurrency control may be turned off by
    internal functions. If resource enables soft delete, soft deleted
    documents will be returned, and must be handled by callers.

    :param resource: the name of the resource to which the document belongs to.
    :param concurrency_check: boolean check for concurrency control
    :param **lookup: document lookup query

    .. versionchanged:: 0.6
       Return soft deleted documents.

    .. versionchanged:: 0.5
       Concurrency control optional for internal functions.
       ETAG are now stored with the document (#369).

    .. versionchanged:: 0.0.9
       More informative error messages.

    .. versionchanged:: 0.0.5
       Pass current resource to ``parse_request``, allowing for proper
       processing of new configuration settings: `filters`, `sorting`,
       `paging`.
    """
    req = parse_request(resource)
    if config.DOMAIN[resource]['soft_delete']:
        # get_document should always fetch soft deleted documents from the db
        # callers must handle soft deleted documents
        req.show_deleted = True
    document = app.data.find_one(resource, req, **lookup)
    if document:
        if not req.if_match and config.IF_MATCH and concurrency_check:
            # we don't allow editing unless the client provides an etag
            # for the document
            abort(428, description='To edit a document '
                  'its etag must be provided using the If-Match header')
        # ensure the retrieved document has LAST_UPDATED and DATE_CREATED,
        # eventually with same default values as in GET.
        document[config.LAST_UPDATED] = last_updated(document)
        document[config.DATE_CREATED] = date_created(document)
        if req.if_match and concurrency_check:
            # stored etag wins; fall back to recomputing it
            ignore_fields = config.DOMAIN[resource]['etag_ignore_fields']
            etag = document.get(config.ETAG,
                                document_etag(document,
                                              ignore_fields=ignore_fields))
            if req.if_match != etag:
                # client and server etags must match, or we don't allow editing
                # (ensures that client's version of the document is up to date)
                abort(412, description='Client and server etags don\'t match')
    return document
def getitem(resource, **lookup):
    """ Retrieves and returns a single document.

    :param resource: the name of the resource to which the document belongs.
    :param **lookup: the lookup query.

    Returns a ``(response, last_modified, etag, status)`` tuple (status 304
    on a matching conditional request), or aborts with 404.

    .. versionchanged:: 0.0.6
       Support for HEAD requests.

    .. versionchanged:: 0.0.6
       ETag added to payload.

    .. versionchanged:: 0.0.5
       Support for user-restricted access to resources.
       Support for LAST_UPDATED field missing from documents, because they
       were created outside the API context.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionchanged:: 0.0.3
       Superflous ``response`` container removed. Links wrapped with
       ``_links``. Links are now properly JSON formatted.
    """
    response = {}
    req = parse_request(resource)
    document = app.data.find_one(resource, **lookup)
    if document:
        # need to update the document field as well since the etag must
        # be computed on the same document representation that might have
        # been used in the collection 'get' method
        last_modified = document[config.LAST_UPDATED] = _last_updated(document)
        document['etag'] = document_etag(document)
        if req.if_none_match and document['etag'] == req.if_none_match:
            # request etag matches the current server representation of the
            # document, return a 304 Not-Modified.
            return response, last_modified, document['etag'], 304
        if req.if_modified_since and last_modified <= req.if_modified_since:
            # request If-Modified-Since conditional request match. We test
            # this after the etag since Last-Modified dates have lower
            # resolution (1 second).
            return response, last_modified, document['etag'], 304
        # HATEOAS links for the returned document
        response['_links'] = {
            'self': document_link(resource, document[config.ID_FIELD]),
            'collection': collection_link(resource),
            'parent': home_link()
        }
        response.update(document)
        return response, last_modified, document['etag'], 200
    abort(404)
def test_basic_search_query(self): with self.app.app_context(): self.app.data.insert('items', [ {'uri': 'foo'}, {'uri': 'bar'} ]) with self.app.test_request_context('/items/?q=foo'): req = parse_request('items') cursor = self.app.data.find('items', req, None) self.assertEquals(1, cursor.count())
def create(self, docs, **kwargs): if not docs: raise SuperdeskApiError.notFoundError("Content is missing") req = parse_request(self.datasource) try: get_component(ItemAutosave).autosave(docs[0]["_id"], docs[0], get_user(required=True), req.if_match) except InvalidEtag: raise SuperdeskApiError.preconditionFailedError("Client and server etags don't match") except KeyError: raise SuperdeskApiError.badRequestError("Request for Auto-save must have _id") return [docs[0]["_id"]]
def test_elastic_filter_callback(self): with self.app.app_context(): self.app.data.insert('items_with_callback_filter', [ {'uri': 'foo'}, {'uri': 'bar'}, ]) with self.app.test_request_context(): req = parse_request('items_with_callback_filter') cursor = self.app.data.find('items_with_callback_filter', req, None) self.assertEqual(1, cursor.count())
def like(product_id):
    """Register a 'like' for a product and import matching MercadoLibre offers.

    Looks up the product and its category, stores the like for the session
    user, then queries the MELI search API and inserts one offer document
    per result. Returns an empty string as the response body.
    """
    product = current_app.data.find("product", parse_request('product'),
                                    {'_id': product_id})[0]
    category = current_app.data.find("category", parse_request('category'),
                                     {'_id': product['category']})[0]

    # ?value=false records an explicit dislike; anything else counts as a like
    does_like = request.args.get('value', 'true').lower() == 'true'
    current_app.data.insert('like', {
        'does_like': does_like,
        'user': session['user_id'],
        'product': product_id,
        'viewed': False
    })

    search_path = "/sites/MLA/search/?category=%s&q=%s" % (
        category['meli_id'], product['description'])
    results = json.loads(meli.get(search_path).content)

    for item in results['results']:
        current_app.data.insert('offer', {
            'meli_seller': item['seller']['id'],
            'meli_item_id': item['id'],
            'meli_link': item['permalink'],
            'meli_image': item['thumbnail'],
            'product': product_id,
            'original_price': item['price'],
            'new_price': None
        })
    return ""
def get_offers():
    """Return the best offer for each product the session user likes.

    For every like belonging to the current user, finds the cheapest offer
    for that product, annotates it with the average price and the discount
    percentage relative to that average, and returns the list as JSON.

    Bug fixes vs. the previous version:
    - the average price was divided by ``offers.count()`` (the count of ALL
      offers) instead of the number of offers for this product, understating
      the average whenever other products had offers;
    - a liked product with no offers raised IndexError on ``os[0]``; such
      likes are now skipped.
    """
    res = []
    likes = current_app.data.find('like', parse_request('like'), {})
    for like in likes:
        if session['user_id'] != str(like['user']):
            continue
        offers = current_app.data.find('offer', parse_request('offer'), {})
        matching = [o for o in offers
                    if str(o['product']) == str(like['product'])]
        if not matching:
            # no offers imported for this product yet
            continue
        avg = sum(o['original_price'] for o in matching) / (1.0 * len(matching))
        matching.sort(key=lambda o: o['original_price'])
        least = matching[0]
        least['avg'] = avg
        # clamp at 0 so above-average cheapest offers don't show negative
        least['percent_discount'] = 100 * max(
            (avg - least['original_price']) / (1.0 * avg), 0)
        least['product'] = str(least['product'])
        least['_id'] = str(least['_id'])
        res.append(least)
    return json.dumps(res)
def find_one(self, req, **lookup): session_doc = super().find_one(req, **lookup) user_doc = get_resource_service('users').find_one(req=None, _id=session_doc['user']) self.enhance_document_with_default_prefs(session_doc, user_doc) self.enhance_document_with_user_privileges(session_doc, user_doc) session_doc[_action_key] = get_privileged_actions(session_doc[_privileges_key]) if req is None: req = parse_request('auth') session_doc['_etag'] = req.if_match else: session_doc['_etag'] = document_etag(session_doc) return session_doc
def test_parse_request_page(self): self.assertEqual(parse_request().page, 1) self.assertEqual(parse_request(args={'page': 2}).page, 2) self.assertEqual(parse_request(args={'page': -1}).page, 1) self.assertEqual(parse_request(args={'page': 0}).page, 1) self.assertEqual(parse_request(args={'page': 1.1}).page, 1) self.assertEqual(parse_request(args={'page': 'string'}).page, 1)
def test_parse_request_page(self): self.assertEqual(parse_request().page, 1) self.assertEqual(parse_request({'page': 2}).page, 2) self.assertEqual(parse_request({'page': -1}).page, 1) self.assertEqual(parse_request({'page': 0}).page, 1) self.assertEqual(parse_request({'page': 1.1}).page, 1) self.assertEqual(parse_request({'page': 'string'}).page, 1)
def test_elastic_sort_by_score_if_there_is_query(self): with self.app.app_context(): self.app.data.insert('items', [ {'uri': 'foo', 'name': 'foo bar'}, {'uri': 'bar', 'name': 'foo bar'} ]) with self.app.test_request_context('/items/'): req = parse_request('items') req.args = {'q': 'foo'} cursor = self.app.data.find('items', req, None) self.assertEqual(2, cursor.count()) self.assertEqual('foo', cursor[0]['uri'])
def test_list_by_where(self): with self.app.app_context(): self.app.data.insert('items_with_additional_lookup', [ {'uri': 'foo'}, {'uri': 'bar'}, ]) with self.app.test_request_context('/items_with_additional_lookup/'): req = parse_request('items_with_additional_lookup') req.args = {'where': '{"uri":"foo"}'} resp = self.app.data.find('items_with_additional_lookup', req, {}) self.assertEqual(1, resp.count()) self.assertEqual('foo', resp[0]['uri'])
def get_internal(resource, **lookup):
    """Fetch every document of *resource* matching *lookup*, fully resolved.

    Returns ``(response, None)`` where response maps the configured ITEMS
    key to the list of documents, each run through
    ``build_response_document`` with the resolved embedded fields.
    """
    req = parse_request(resource)
    embedded = resolve_embedded_fields(resource, req)
    # req.show_deleted = True
    documents = []
    for doc in app.data.find(resource, req, lookup):
        build_response_document(doc, resource, embedded)
        documents.append(doc)
    return {app.config['ITEMS']: documents}, None
def _perform_aggregation(resource, pipeline, options):
    """Run the resource's aggregation pipeline and build the GET response.

    .. versionadded:: 0.7
    """
    # TODO move most of this down to the Mongo layer?
    # TODO experiment with cursor.batch_size as alternative pagination
    # implementation

    def _substitute(node, key, value):
        # Recursively walk a stage dict, replacing every value equal to
        # `key` (a client-supplied placeholder token) with `value`.
        for node_key, node_value in node.items():
            if isinstance(node_value, dict):
                _substitute(node_value, key, value)
            if key == node_value:
                node[node_key] = value

    req = parse_request(resource)
    stages = copy.deepcopy(pipeline)

    if req.aggregation:
        try:
            query = json.loads(req.aggregation)
        except ValueError:
            abort(400, description='Aggregation query could not be parsed.')
        for key, value in query.items():
            if key[0] != '$':
                # NOTE(review): this branch is a no-op — non-'$' keys are
                # still substituted below. Confirm whether they should be
                # skipped or rejected instead.
                pass
            for stage in stages:
                _substitute(stage, key, value)

    # Client-driven pagination expressed as extra $skip/$limit stages.
    if req.max_results > 1:
        stages.append({"$skip": (req.page - 1) * req.max_results})
        stages.append({"$limit": req.max_results})

    documents = list(app.data.aggregate(resource, stages, options))
    # PyMongo's CommandCursor does not return a count, so we cannot
    # provide pagination/total count info as we do with a normal
    # (non-aggregate) GET request.
    return {config.ITEMS: documents}, None, None, 200, []
def find_one(self, req, **lookup): session_doc = super().find_one(req, **lookup) user_doc = get_resource_service('users').find_one( req=None, _id=session_doc['user']) self.enhance_document_with_default_prefs(session_doc, user_doc) self.enhance_document_with_user_privileges(session_doc, user_doc) session_doc[_action_key] = get_privileged_actions( session_doc[_privileges_key]) if req is None: req = parse_request('auth') session_doc['_etag'] = req.if_match else: session_doc['_etag'] = document_etag(session_doc) return session_doc
def update_amazon_products():
    """Sync Amazon search results into the local product store.

    For every category, searches the Amazon API using the category's
    keywords and inserts each product not already stored (keyed by ASIN),
    fetching its large image URL. Items missing expected attributes are
    skipped. Returns an empty string as the response body.
    """
    categories = current_app.data.find('category', parse_request('category'), {})
    app.logger.debug('Categories {}'.format(categories))
    for category in categories:
        app.logger.debug('Category {}'.format(category))
        found = amazon_api.item_search(category['amazon_name'],
                                       Keywords=category['name'])
        for product in found:
            app.logger.debug('Product ASIN {}'.format(product.ASIN))
            asin = str(product.ASIN)
            already_stored = current_app.data.find(
                'product', parse_request('product'),
                {'product_id': asin}).count() > 0
            if already_stored:
                continue
            try:
                image_url = amazon_api.item_lookup(
                    asin, ResponseGroup='Images').Items.Item.LargeImage.URL
                entry = {
                    'product_id': asin,
                    'image_url': str(image_url),
                    'description': unicode(product.ItemAttributes.Title),
                    'category': category['_id']
                }
                app.logger.debug('Entry {}'.format(entry))
                current_app.data.insert('product', entry)
            except AttributeError:
                # some Amazon items lack an image or title; skip them
                continue
    return ''
def get_document(resource, concurrency_check, **lookup):
    """Retrieve a single document on behalf of an editing method.

    Used by POST, PATCH and DELETE: verifies that the client request
    references the current representation of the document (If-Match etag)
    before returning it. Internal callers may disable the check.

    :param resource: the name of the resource to which the document belongs.
    :param concurrency_check: whether to enforce etag concurrency control.
    :param **lookup: document lookup query.

    Returns the document, or None when the lookup matches nothing.
    """
    req = parse_request(resource)
    document = app.data.find_one(resource, None, **lookup)
    if not document:
        return None

    if concurrency_check and not req.if_match and config.IF_MATCH:
        # editing requires the client to supply an etag for the document
        abort(403, description=debug_error_message(
            'An etag must be provided to edit a document'
        ))

    # Fill in LAST_UPDATED / DATE_CREATED with the same defaults GET would
    # use, so the etag below is computed on a complete representation.
    document[config.LAST_UPDATED] = last_updated(document)
    document[config.DATE_CREATED] = date_created(document)

    if concurrency_check and req.if_match:
        etag = document.get(config.ETAG, document_etag(document))
        if req.if_match != etag:
            # client and server etags must match, or we don't allow editing
            # (ensures the client's version of the document is up to date)
            abort(412, description=debug_error_message(
                'Client and server etags don\'t match'
            ))
    return document