def ops_published_data_crawl_handler(request):
    """Crawl published-data at OPS"""

    # Constituents: abstract, biblio and/or full-cycle
    constituents = request.matchdict.get('constituents', 'full-cycle')

    # Use the logger instead of a bare debug "print" statement,
    # consistent with the other handlers in this module.
    log.info('constituents: %s', constituents)

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: ' + query)

    # Transcode CQL query and propagate keywords to highlighting component
    query_object, query = cql_prepare_query(query)
    propagate_keywords(request, query_object)

    log.info('query cql: ' + query)

    # Number of documents to fetch per request
    chunksize = int(request.params.get('chunksize', '100'))

    try:
        result = ops_published_data_crawl(constituents, query, chunksize)
        return result

    except Exception as ex:
        log.error(
            u'OPS crawler error: query="{0}", reason={1}, Exception was:\n{2}'.format(
                query, ex, _exception_traceback()))
        request.errors.add('ops-published-data-crawl', 'query', str(ex))
def depatisnet_published_data_search_handler(request):
    """Search for published-data at DEPATISnet"""

    # CQL query string
    expression = request.params.get('expression', '')
    log.info('query raw: ' + expression)

    # Lazy-fetch more entries up to maximum of depatisnet
    # TODO: get from patzilla.access.dpma.depatisnet
    page_size = 1000 if int(request.params.get('range_begin', 0)) > 250 else 250

    # Compute query options: limit, sorting, whether to remove family members
    search_options = {'limit': page_size}

    # Propagate request parameters to search options parameters
    request_to_options(request, search_options)

    # Transcode CQL query
    query_object, expression = cql_prepare_query(expression)
    log.info('query cql: ' + expression)

    # Propagate keywords to highlighting component
    propagate_keywords(request, query_object)

    try:
        return dpma_published_data_search(expression, search_options)

    except SyntaxError as ex:
        request.errors.add('depatisnet-search', 'expression', str(ex.msg))
        log.warn(request.errors)

    except NoResultsException as ex:
        # Forward response to let the frontend recognize zero hits
        request.response.status = HTTPNotFound.code
        return ex.data

    except Exception as ex:
        message = handle_generic_exception(request, ex, 'depatisnet-search', expression)
        request.errors.add('depatisnet-search', 'search', message)
def depatisnet_published_data_crawl_handler(request):
    """Crawl published-data at DEPATISnet"""

    # CQL query string
    expression = request.params.get('expression', '')
    log.info('query raw: ' + expression)

    # Transcode CQL query and propagate keywords to highlighting component
    query_object, expression = cql_prepare_query(expression)
    propagate_keywords(request, query_object)

    # Compute query options: limit, sorting, whether to remove family members
    crawl_options = {'limit': 1000}

    # Propagate request parameters to search options parameters
    request_to_options(request, crawl_options)

    log.info('query cql: ' + expression)

    try:
        return dpma_published_data_search(expression, crawl_options)

    except SyntaxError as ex:
        request.errors.add('depatisnet-search', 'expression', str(ex.msg))
        log.warn(request.errors)

    except Exception as ex:
        # Some upstream exceptions carry the raw HTTP response, useful for diagnosis
        http_response = getattr(ex, 'http_response', None)
        log.error(u'DEPATISnet crawler error: query="{0}", reason={1}\nresponse:\n{2}\nexception:\n{3}'.format(
            expression, ex, http_response, _exception_traceback()))
        message = u'An exception occurred while processing your query<br/>Reason: {}'.format(ex)
        request.errors.add('depatisnet-search', 'crawl', message)
def ops_published_data_search_handler(request):
    """Search for published-data at OPS"""

    # Constituents: abstract, biblio and/or full-cycle
    constituents = request.params.get('constituents', 'full-cycle')

    # CQL query string
    query = request.params.get('expression', '')
    log.info('query raw: %s', query)

    query_object, query = cql_prepare_query(query)
    log.info('query cql: %s', query)

    # range: x-y, maximum delta is 100, default is 25
    # Renamed local from "range" to avoid shadowing the builtin.
    page_range = request.params.get('range') or '1-25'

    # Search options
    family_swap_default = asbool(request.params.get('family_swap_default'))

    try:
        if family_swap_default:
            result = ops_published_data_search_swap_family(constituents, query, page_range)
        else:
            result = ops_published_data_search(constituents, query, page_range)

        # Propagate keywords to highlighting component only after a successful search
        propagate_keywords(request, query_object)
        return result

    except NoResultsException as ex:
        # Forward response to let the frontend recognize zero hits
        request.response.status = HTTPNotFound.code
        return ex.data

    except Exception as ex:
        message = handle_generic_exception(request, ex, 'ops-search', query)
        request.errors.add('ops-search', 'search', message)

    # Note: only reached on the generic-exception path; the success and
    # zero-hit paths return above.
    log.info('query finished')
def ificlaims_published_data_search_handler(request):
    """Search for published-data at IFI CLAIMS Direct"""

    # Get hold of query expression and filter
    query = SmartBunch({
        'expression': request.params.get('expression', ''),
        'filter': request.params.get('filter', ''),
    })
    log.info('Query: {}'.format(query))

    # Parse expression, extract and propagate keywords to user interface
    parser = IFIClaimsParser(query.expression)
    propagate_keywords(request, parser)

    # Fixup query: wrap into quotes if cql string is a) unspecific, b) contains spaces and c) is still unquoted
    if should_be_quoted(query.expression):
        query.expression = '"%s"' % query.expression

    # Lazy-fetch more entries
    # TODO: get from patzilla.access.ificlaims
    page_size = 250
    begin_local = int(request.params.get('range_begin', 0))
    # Snap the local offset down to the nearest remote page boundary
    begin_remote = int(begin_local / page_size) * page_size

    # Compute query options: limit, offset, sorting, whether to remove family members
    options = SmartBunch()
    options.update({
        'limit': page_size,
        'offset': begin_remote,
    })

    # Propagate request parameters to search options parameters
    request_to_options(request, options)

    try:
        return ificlaims_search(query, options)

    except LoginException as ex:
        request.errors.add('ificlaims-search', 'login', ex.details)
        log.warn(request.errors)

    except SyntaxError as ex:
        request.errors.add('ificlaims-search', 'expression', unicode(ex.msg))
        log.warn(request.errors)

    except SearchException as ex:
        request.errors.add('ificlaims-search', 'search', ex.get_message())
        log.warn(request.errors)

    except NoResultsException as ex:
        # Forward response to let the frontend recognize zero hits
        request.response.status = HTTPNotFound.code
        return ex.data

    except OperationFailure as ex:
        request.errors.add('ificlaims-search', 'internals', unicode(ex))
        log.error(request.errors)

    except Exception as ex:
        message = handle_generic_exception(request, ex, 'ificlaims-search', query)
        request.errors.add('ificlaims-search', 'search', message)