def ops_published_data_crawl_handler(request):
    """
    Crawl published-data at OPS.

    Reads the crawl constituents from the route match dict, the CQL
    expression and chunk size from the request parameters, transcodes
    the expression and dispatches the crawl. On failure, the error is
    logged and attached to the request's error collection.
    """

    # Constituents: abstract, biblio and/or full-cycle
    constituents = request.matchdict.get('constituents', 'full-cycle')
    # Use the logger instead of a stray debug print()
    log.debug('constituents: %s', constituents)

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: ' + query)

    # Transcode CQL query expression
    search = cql_prepare_query(query)

    # Propagate keywords to highlighting component
    keywords_to_response(request, search=search)

    log.info('query cql: ' + search.expression)

    chunksize = int(request.params.get('chunksize', '100'))

    try:
        result = ops_published_data_crawl(constituents, search.expression, chunksize)
        return result

    except Exception as ex:
        log.error(
            'OPS crawler error: query="{0}", reason={1}, Exception was:\n{2}'.format(
                query, ex, _exception_traceback()))
        request.errors.add('ops-published-data-crawl', 'query', str(ex))
def ops_published_data_crawl_handler(request):
    """
    Crawl published-data at OPS (legacy variant using the tuple-returning
    ``cql_prepare_query`` API).

    NOTE(review): this duplicates the handler based on the newer
    ``search``-object API elsewhere in the file — confirm which one is
    actually wired into the routing configuration.
    """

    # Constituents: abstract, biblio and/or full-cycle
    constituents = request.matchdict.get('constituents', 'full-cycle')
    # Python 2 "print" statement replaced: it is a SyntaxError on
    # Python 3, and debug output belongs in the logger anyway.
    log.debug('constituents: %s', constituents)

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: ' + query)

    # Transcode CQL query and propagate keywords to highlighting component
    query_object, query = cql_prepare_query(query)
    propagate_keywords(request, query_object)

    log.info('query cql: ' + query)

    chunksize = int(request.params.get('chunksize', '100'))

    try:
        result = ops_published_data_crawl(constituents, query, chunksize)
        return result

    except Exception as ex:
        log.error(
            u'OPS crawler error: query="{0}", reason={1}, Exception was:\n{2}'.format(
                query, ex, _exception_traceback()))
        request.errors.add('ops-published-data-crawl', 'query', str(ex))
def parse(self):
    """
    Parse the CQL expression and extract keywords.

    Parsing happens at most once: the result is cached on the instance
    in ``self.search`` / ``self.query_object``. Returns ``self`` so the
    call can be chained.
    """
    # Only do the work when no cached parse result exists yet.
    if not self.query_object:
        # Parse CQL expression and extract keywords
        self.search = cql_prepare_query(
            self.expression,
            grammar=IFIClaimsGrammar,
            keyword_fields=IFIClaimsExpression.fieldnames)
        self.query_object = self.search.cql_parser
    return self
def parse(self):
    """
    Parse the expression and extract keywords (depa.tech variant).

    Parsing happens at most once: the parsed query object is cached on
    the instance. Returns ``self`` so the call can be chained.
    """
    if self.query_object:
        return self

    # Parse expression, extract and propagate keywords.
    # NOTE(review): the recompiled query string returned by
    # cql_prepare_query() is deliberately discarded here (the original
    # bound it to an unused local) — confirm downstream consumers only
    # need the parsed query object.
    self.query_object, _ = cql_prepare_query(
        self.expression,
        grammar=DepaTechGrammar,
        keyword_fields=DepaTechExpression.fieldnames)
    return self
def depatisnet_published_data_search_handler(request):
    """
    Search for published-data at DEPATISnet.

    Transcodes the CQL expression from the request, builds search
    options (limit derived from the requested range offset) and runs
    the search. Syntax errors and zero-hit responses are translated
    into request errors / HTTP 404 respectively.
    """

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: ' + query)

    # Lazy-fetch more entries up to maximum of DEPATISnet.
    # TODO: get from patzilla.access.dpma.depatisnet
    request_size = 250
    if int(request.params.get('range_begin', 0)) > request_size:
        request_size = 1000

    # Compute query options, like
    # - limit
    # - sorting
    # - whether to remove family members
    options = {}
    options.update({'limit': request_size})

    # Propagate request parameters to search options parameters
    request_to_options(request, options)

    # Transcode CQL query
    query_object, query = cql_prepare_query(query)
    log.info('query cql: ' + query)

    # Propagate keywords to highlighting component
    propagate_keywords(request, query_object)

    try:
        return dpma_published_data_search(query, options)

    except SyntaxError as ex:
        request.errors.add('depatisnet-search', 'expression', str(ex.msg))
        # "warning" replaces the deprecated "warn" alias
        log.warning(request.errors)

    except NoResultsException as ex:
        # Forward response to let the frontend recognize zero hits
        request.response.status = HTTPNotFound.code
        return ex.data

    except Exception as ex:
        message = handle_generic_exception(request, ex, 'depatisnet-search', query)
        request.errors.add('depatisnet-search', 'search', message)
def ops_published_data_search_handler(request):
    """
    Search for published-data at OPS.

    Transcodes the CQL expression, reads the result range and the
    family-swap option from the request, runs the search and propagates
    keywords to the highlighting component. Zero-hit responses are
    forwarded with HTTP 404; other failures are attached to the
    request's error collection.
    """

    # Constituents: abstract, biblio and/or full-cycle
    constituents = request.params.get('constituents', 'full-cycle')

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: %s', query)

    # Transcode CQL query expression
    search = cql_prepare_query(query)
    log.info('query cql: %s', search.expression)

    # Range: x-y, maximum delta is 100, default is 25.
    # Named "range_" to avoid shadowing the builtin "range".
    range_ = request.params.get('range') or '1-25'

    # Search options
    family_swap_default = asbool(request.params.get('family_swap_default'))

    try:
        if family_swap_default:
            result = ops_published_data_search_swap_family(
                constituents, search.expression, range_)
        else:
            result = ops_published_data_search(
                constituents, search.expression, range_)

        # Propagate keywords to highlighting component
        keywords_to_response(request, search=search)

        return result

    except NoResultsException as ex:
        # Forward response to let the frontend recognize zero hits
        request.response.status = HTTPNotFound.code
        return ex.data

    except Exception as ex:
        message = handle_generic_exception(request, ex, 'ops-search', search.expression)
        request.errors.add('ops-search', 'search', message)

    log.info('query finished')
def prepare_search(request):
    """
    Derive search expression, syntax and options from the request.

    Supports CQL (default) and Ikofax syntax, selected either by the
    ``query_data[modifiers][syntax-ikofax]`` parameter or an
    ``ikofax:`` expression prefix.

    Returns a ``(search, options)`` tuple. If the syntax cannot be
    resolved, an error is attached to the request and
    ``(None, options)`` is returned.
    """

    # CQL expression string
    expression = request.params.get('expression', '').strip()

    # Compute expression syntax
    # NOTE(review): syntax_cql is currently unused — the CQL syntax is
    # the fall-through default; kept for symmetry with syntax_ikofax.
    syntax_cql = asbool(request.params.get('query_data[modifiers][syntax-cql]'))
    syntax_ikofax = asbool(request.params.get('query_data[modifiers][syntax-ikofax]'))
    syntax = 'cql'
    if syntax_ikofax or expression.startswith('ikofax:'):
        expression = expression.replace('ikofax:', '')
        syntax = 'ikofax'

    log.info(u'DEPATISnet query: {}, syntax: {}'.format(expression, syntax))

    # Compute query options, like
    # - limit
    # - sorting
    # - whether to remove family members
    options = {}
    options.update({'syntax': syntax})

    # Propagate request parameters to search options parameters
    request_to_options(request, options)

    # Transcode query expression
    if syntax == 'cql':
        search = cql_prepare_query(expression)
    elif syntax == 'ikofax':
        search = ikofax_prepare_query(expression)
    else:
        # Defensive branch: "syntax" can currently only be 'cql' or
        # 'ikofax', but without this early return the code below would
        # raise a NameError on the unbound "search" variable.
        request.errors.add('depatisnet-search', 'expression',
                           u'Unknown syntax {}'.format(syntax))
        return None, options

    # Propagate keywords to highlighting component
    keywords_to_response(request, search=search)

    return search, options
def depatisnet_published_data_crawl_handler(request):
    """
    Crawl published-data at DEPATISnet.

    Transcodes the CQL expression, propagates keywords to the
    highlighting component and fetches up to 1000 hits in one go.
    Failures are logged (including the upstream HTTP response, when
    available) and attached to the request's error collection.
    """

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: ' + query)

    # Transcode CQL query and propagate keywords to highlighting component
    query_object, query = cql_prepare_query(query)
    propagate_keywords(request, query_object)

    # Crawl chunk size
    chunksize = 1000

    # Compute query options, like
    # - limit
    # - sorting
    # - whether to remove family members
    options = {}
    options.update({'limit': chunksize})

    # Propagate request parameters to search options parameters
    request_to_options(request, options)

    log.info('query cql: ' + query)

    try:
        result = dpma_published_data_search(query, options)
        return result

    except SyntaxError as ex:
        request.errors.add('depatisnet-search', 'expression', str(ex.msg))
        # "warning" replaces the deprecated "warn" alias
        log.warning(request.errors)

    except Exception as ex:
        # Include the upstream HTTP response in the log when available
        http_response = None
        if hasattr(ex, 'http_response'):
            http_response = ex.http_response
        log.error(u'DEPATISnet crawler error: query="{0}", reason={1}\nresponse:\n{2}\nexception:\n{3}'.format(
            query, ex, http_response, _exception_traceback()))
        message = u'An exception occurred while processing your query<br/>Reason: {}'.format(ex)
        request.errors.add('depatisnet-search', 'crawl', message)