def create(self):
    data = validator.parse_post(
        ['content'],
        ['identifier', 'index', 'indexes', 'metadata'])

    indexes = validator.validate_indexes(data)
    if indexes is None:
        error('You must specify either an "index" or "indexes".')

    # If an identifier was supplied and already refers to an existing
    # document, treat the request as an update rather than a create.
    if data.get('identifier'):
        try:
            document = self._get_document(data['identifier'])
        except NotFound:
            pass
        else:
            return self.update(data['identifier'])

    document = Document.create(
        content=data['content'],
        identifier=data.get('identifier'))

    if data.get('metadata'):
        document.metadata = data['metadata']

    logger.info('Created document with id=%s', document.get_id())

    for index in indexes:
        index.add_to_index(document)
        logger.info('Added document %s to index %s', document.get_id(),
                    index.name)

    # Any uploaded files become attachments on the new document.
    if len(request.files):
        self.attach_files(document)

    return self.detail(document.get_id())
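# Example client request for the document create() handler above -- a minimal
# sketch, not part of the handler itself. The host, port and route are
# assumptions (routing is not shown in this excerpt); the payload keys mirror
# the parse_post() call: 'content' is required, while 'identifier', 'index',
# 'indexes' and 'metadata' are optional.
import requests

payload = {
    'content': 'The quick brown fox jumped over the lazy dog.',
    'indexes': ['blog'],                    # names of existing indexes
    'metadata': {'source': 'example.txt'},  # arbitrary key/value data
}
resp = requests.post('http://localhost:8000/documents/', json=payload)
print(resp.json())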
def create(self):
    data = validator.parse_post(['name'])

    with database.atomic():
        try:
            index = Index.create(name=data['name'])
        except IntegrityError:
            error('"%s" already exists.' % data['name'])
        else:
            logger.info('Created new index "%s"' % index.name)

    return self.detail(index.name)
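# Sketch of a request against the index create() handler above. The only
# accepted field is 'name', per parse_post(['name']); the URL is an
# assumption for illustration only.
import requests

requests.post('http://localhost:8000/indexes/', json={'name': 'blog'})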
def update(self, pk):
    index = get_object_or_404(Index, Index.name == pk)
    data = validator.parse_post(['name'])
    index.name = data['name']

    with database.atomic():
        try:
            index.save()
        except IntegrityError:
            error('"%s" is already in use.' % index.name)
        else:
            logger.info('Updated index "%s"' % index.name)

    return self.detail(index.name)
def parse_post(self, required_keys=None, optional_keys=None):
    """
    Clean and validate POSTed JSON data by defining sets of required and
    optional keys.
    """
    if request.headers.get('content-type') == 'application/json':
        data = request.data
    elif 'data' not in request.form:
        error('Missing correct content-type or missing "data" field.')
    else:
        data = request.form['data']

    if data:
        try:
            data = json_load(data)
        except ValueError:
            error('Unable to parse JSON data from request.')
    else:
        data = {}

    required = set(required_keys or ())
    optional = set(optional_keys or ())
    all_keys = required | optional
    keys_present = set(key for key in data if data[key] not in ('', None))

    missing = required - keys_present
    if missing:
        error('Missing required fields: %s' % ', '.join(sorted(missing)))

    invalid_keys = keys_present - all_keys
    if invalid_keys:
        error('Invalid keys: %s' % ', '.join(sorted(invalid_keys)))

    return data
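# Standalone illustration of the key checks performed by parse_post() above,
# using a made-up payload. Blank and null values are treated as absent, keys
# outside the required/optional sets are rejected, and missing required keys
# are reported together.
required = {'content'}
optional = {'identifier', 'index', 'indexes', 'metadata'}
data = {'content': 'hello world', 'indexes': ['blog'], 'extra': 'oops'}

keys_present = set(key for key in data if data[key] not in ('', None))
missing = required - keys_present                     # set() -> ok
invalid_keys = keys_present - (required | optional)   # {'extra'} -> rejected
print(missing, invalid_keys)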
def update(self, document_id, pk):
    document = self._get_document(document_id)
    attachment = self._get_attachment(document, pk)
    validator.parse_post([], [])  # Ensure POST data is clean.

    nfiles = len(request.files)
    if nfiles == 1:
        attachment.delete_instance()
        self.attach_files(document)
    elif nfiles > 1:
        error('Only one attachment permitted when performing update.')
    else:
        error('No file attachment found.')

    return self.detail(document.get_id(), attachment.filename)
def create(self, document_id):
    document = self._get_document(document_id)
    validator.parse_post([], [])  # Ensure POST data is clean.

    if len(request.files):
        attachments = self.attach_files(document)
    else:
        error('No file attachments found.')

    return jsonify({
        'attachments': [
            attachment_serializer.serialize(attachment)
            for attachment in attachments
        ]
    })
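# Sketch of an attachment upload against the create() handler above. The
# handler reads uploads from request.files, so the client must send
# multipart/form-data; an empty JSON 'data' form field satisfies the
# parse_post() check shown earlier. The URL and document id are assumptions
# for illustration.
import requests

with open('report.pdf', 'rb') as fh:
    resp = requests.post(
        'http://localhost:8000/documents/1/attachments/',
        data={'data': '{}'},  # keeps parse_post()'s form-data check happy
        files={'file': ('report.pdf', fh)})
print(resp.json())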
def _search_response(self, index, allow_blank, document_count):
    ranking = request.args.get('ranking') or SEARCH_BM25
    if ranking not in RANKING_CHOICES:
        error('Unrecognized "ranking" value. Valid options are %s' %
              ', '.join(RANKING_CHOICES))

    ordering = request.args.getlist('ordering')
    filters = validator.extract_get_params()

    q = request.args.get('q', '').strip()
    if not q and not allow_blank:
        error('Search term is required.')

    # A blank search term (when allowed) falls back to the match-all query.
    query = engine.search(q or '*', index, ranking, ordering, **filters)

    # Include scores unless the term is the literal match-all '*' or the
    # ranking is disabled.
    include_score = (q != '*' and ranking != SEARCH_NONE)

    pq = self.paginated_query(query)
    response = {
        'document_count': document_count,
        'documents': document_serializer.serialize_query(
            pq.get_object_list(),
            include_score=include_score),
        'filtered_count': query.count(),
        'filters': filters,
        'ordering': ordering,
        'page': pq.get_page(),
        'pages': pq.get_page_count(),
    }
    if q:
        response.update(ranking=ranking, search_term=q)

    return response
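# Example query against the search response builder above, as a sketch. The
# route and the concrete parameter values are assumptions; the parameter
# names are the ones _search_response() reads: 'q' (search term), 'ranking'
# (one of RANKING_CHOICES, defaulting to BM25), repeatable 'ordering' values,
# plus any extra filters picked up by extract_get_params().
import requests

params = [
    ('q', 'brown fox'),
    ('ranking', 'bm25'),
    ('ordering', '-id'),
]
resp = requests.get('http://localhost:8000/documents/', params=params)
print(resp.json())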
def validate_indexes(self, data, required=True):
    if data.get('index'):
        index_names = (data['index'],)
    elif data.get('indexes'):
        index_names = data['indexes']
    elif ('index' in data or 'indexes' in data) and not required:
        return ()
    else:
        return None

    indexes = list(Index.select().where(Index.name << index_names))

    # Validate that all the index names exist.
    observed_names = set(index.name for index in indexes)

    invalid_names = []
    for index_name in index_names:
        if index_name not in observed_names:
            invalid_names.append(index_name)

    if invalid_names:
        error('The following indexes were not found: %s.' %
              ', '.join(invalid_names))

    return indexes
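# Behavior summary for validate_indexes() above, using made-up payloads:
#   {'index': 'blog'}               -> resolves the single index named 'blog'
#   {'indexes': ['blog', 'wiki']}   -> resolves both names
#   {}                              -> returns None; the caller decides
#                                      whether that is an error (see the
#                                      document create() handler above)
#   {'index': None}, required=False -> returns (): no indexes, but permitted
# Any requested name without a matching Index row is reported back to the
# client in a single error message.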