def word_cloud():
    """View that creates the data for the word cloud.

    Re-runs the serialized search (or an explicit bibcode selection) against
    the Solr term-vector request handler and renders the embedded word cloud
    template with the tf/tf-idf data.
    """
    query_url = config.SOLRQUERY_URL
    # the term-vector request handler lives at .../tvrh next to the normal
    # query handler, so swap the last path component
    tvrh_query_url = query_url.rsplit('/', 1)[0] + '/tvrh'
    try:
        query_components = json.loads(request.values.get('current_search_parameters'))
    except (TypeError, JSONDecodeError):
        #@todo: logging of the error
        return render_template('errors/generic_error.html',
                               error_message='Error while creating the word cloud (code #1). Please try later.')
    # get the maximum number of records to use
    query_components['rows'] = request.values.get('numRecs', config.MAX_EXPORTS['wordcloud'])
    # checked bibcodes, if supplied, override the stored query
    if request.values.has_key('bibcode'):
        bibcodes = request.values.getlist('bibcode')
        query_components['q'] = ' OR '.join(["bibcode:%s" % b for b in bibcodes])
    # request term-vector statistics for abstract and title only
    query_components.update({
        'facets': [],
        'fields': ['id'],
        'highlights': [],
        'defType': 'aqp',
        'tv': 'true',
        'tv.tf_idf': 'true',
        'tv.tf': 'true',
        'tv.positions': 'false',
        # BUGFIX: the Solr TermVectorComponent parameter is "tv.offsets";
        # the original sent the misspelled key "tf.offsets", which Solr
        # silently ignored (so offsets were returned despite the intent)
        'tv.offsets': 'false',
        'tv.fl': 'abstract,title',
        'fl': 'abstract,title'
    })
    req = solr.create_request(**query_components)
    url = tvrh_query_url
    if 'bigquery' in request.values:
        from adsabs.core.solr import bigquery
        bigquery.prepare_bigquery_request(req, request.values['bigquery'])
        url = config.SOLRBIGQUERY_URL
    req = solr.set_defaults(req, query_url=url)
    resp = solr.get_response(req)
    if resp.is_error():
        return render_template('errors/generic_error.html',
                               error_message='Error while creating the word cloud (code #2). Please try later.')
    statsd.incr("visualization.word_cloud.viewed")
    return render_template('word_cloud_embedded.html', wordcloud_data=wc_json(resp.raw_response()))
def get_publications_from_query(query_components, list_type=None, bigquery_id=None):
    """Execute a Solr query built from *query_components*.

    :param query_components: dict of keyword arguments for solr.create_request
    :param list_type: when 'similar', use the "more like this" query path
    :param bigquery_id: optional id of a stored bigquery to attach to the request
    :raises SolrReferenceQueryError: re-raised after logging the failed query
    """
    try:
        # Get the information from Solr
        if list_type and list_type == 'similar':
            resp = get_document_similar(**query_components)
        else:
            req = solr.create_request(**query_components)
            if bigquery_id:
                from adsabs.core.solr import bigquery
                bigquery.prepare_bigquery_request(req, bigquery_id)
            req = solr.set_defaults(req)
            resp = solr.get_response(req)
        # NOTE(review): resp is built but not returned in the visible code —
        # confirm against the full file whether a "return resp" follows
    except SolrReferenceQueryError as e:
        # BUGFIX: the original interpolated the undefined name "q", which
        # raised a NameError inside the handler and masked the real error;
        # log the actual query components instead
        app.logger.error("Solr publications query for %s blew up (%s)" % (query_components, e))
        raise
def paper_network():
    """View that creates the data for the paper network."""
    # without explicit bibcodes the serialized search parameters drive the query
    raw_params = request.values.get('current_search_parameters')
    try:
        params = json.loads(raw_params)
    except (TypeError, JSONDecodeError):
        #@todo: logging of the error
        return render_template('errors/generic_error.html',
                               error_message='Error while creating the paper network (code #1). Please try later.')
    # cap the number of records fetched from solr
    params['rows'] = request.values.get('numRecs', config.MAX_EXPORTS['papernetwork'])
    # an explicit bibcode selection overrides the stored query
    if 'bibcode' in request.values:
        selected = request.values.getlist('bibcode')
        params['q'] = ' OR '.join('bibcode:%s' % b for b in selected)
    # restrict the response to the fields the network machinery needs
    params.update({
        'facets': [],
        'fields': ['bibcode,title,first_author,year', 'citation_count', 'read_count', 'reference'],
        'highlights': [],
    })
    solr_req = solr.create_request(**params)
    target_url = None
    if 'bigquery' in request.values:
        from adsabs.core.solr import bigquery
        bigquery.prepare_bigquery_request(solr_req, request.values['bigquery'])
        target_url = config.SOLRBIGQUERY_URL
    solr_req = solr.set_defaults(solr_req, query_url=target_url)
    solr_resp = solr.get_response(solr_req)
    if solr_resp.is_error():
        return render_template('errors/generic_error.html',
                               error_message='Error while creating the paper network (code #2). Please try later.')
    # prepare the info to send to the paper network machinery
    paper_info = [doc.__dict__['data'] for doc in solr_resp.get_docset_objects() if doc.bibcode]
    statsd.incr("visualization.paper_network.viewed")
    return render_template('paper_network_embedded.html', network_data=get_papernetwork(paper_info))
def author_network():
    """View that creates the data for the author network."""
    # if there are no bibcodes, the stored search extracts the authors
    serialized = request.values.get('current_search_parameters')
    try:
        params = json.loads(serialized)
    except (TypeError, JSONDecodeError):
        #@todo: logging of the error
        return render_template('errors/generic_error.html',
                               error_message='Error while creating the author network (code #1). Please try later.')
    # cap the number of records to use
    params['rows'] = request.values.get('numRecs', config.MAX_EXPORTS['authnetwork'])
    # explicitly checked bibcodes replace the stored query
    if 'bibcode' in request.values:
        checked = request.values.getlist('bibcode')
        params['q'] = ' OR '.join('bibcode:%s' % b for b in checked)
    # only the normalized author lists are needed
    params.update({
        'facets': [],
        'fields': ['author_norm'],
        'highlights': [],
    })
    solr_req = solr.create_request(**params)
    if 'bigquery' in request.values:
        from adsabs.core.solr import bigquery
        bigquery.prepare_bigquery_request(solr_req, request.values['bigquery'])
    solr_req = solr.set_defaults(solr_req)
    solr_resp = solr.get_response(solr_req)
    if solr_resp.is_error():
        return render_template('errors/generic_error.html',
                               error_message='Error while creating the author network (code #2). Please try later.')
    # pull out the per-paper normalized author lists
    lists_of_authors = [doc.author_norm for doc in solr_resp.get_docset_objects() if doc.author_norm]
    statsd.incr("visualization.author_network.viewed")
    return render_template('author_network_embedded.html', network_data=get_authorsnetwork(lists_of_authors))
def test_haproxy_cookie(self):
    """
    Uses the http://httpbin.org/ service to check that the haproxy
    "sticky session" cookie is included in solr requests
    """
    from flask import g
    with self.app.test_request_context():
        # httpbin only echoes cookies for GET requests, so force GET for
        # the duration of this test and restore the original afterwards
        saved_method = solr.request_http_method
        solr.request_http_method = 'GET'
        self.app.preprocess_request()
        req = solr.create_request("foo")
        resp = solr.get_response(req, query_url='http://httpbin.org/cookies')
        expected = {config.SOLR_HAPROXY_SESSION_COOKIE_NAME: g.user_cookie_id}
        self.assertDictContainsSubset(expected, resp.raw['cookies'])
        solr.request_http_method = saved_method
def search(): """ returns the results of a search """ if not len(request.values): form = QueryForm(csrf_enabled=False) # prefill the database select menu option form.db_f.default = config.SEARCH_DEFAULT_DATABASE else: form = QueryForm.init_with_defaults(request.values) if form.validate(): query_components = QueryBuilderSearch.build(form, request.values) bigquery_id = request.values.get('bigquery') try: req = solr.create_request(**query_components) url = None if bigquery_id: prepare_bigquery_request(req, request.values['bigquery']) url = config.SOLRBIGQUERY_URL req = solr.set_defaults(req, query_url=url) with statsd.timer("search.solr.query_response_time"): resp = solr.get_response(req) statsd.incr("search.solr.executed") if bigquery_id: facets = resp.get_facet_parameters() facets.append(('bigquery', bigquery_id)) except Exception, e: statsd.incr("search.solr.failed") raise AdsabsSolrqueryException("Error communicating with search service", sys.exc_info()) if resp.is_error(): statsd.incr("search.solr.error") flash(resp.get_error_message(), 'error') return render_template('search_results.html', resp=resp, form=form, query_components=query_components, bigquery_id=bigquery_id) else:
def alladin_lite():
    """View that creates the data for alladin lite."""
    if 'bibcode' in request.values:
        # explicit selection: use the bibcodes handed to us directly
        bibcodes = request.values.getlist('bibcode')
    else:
        # otherwise re-run the stored search to collect the bibcodes
        try:
            params = json.loads(request.values.get('current_search_parameters'))
        except (TypeError, JSONDecodeError):
            #@todo: logging of the error
            return render_template('errors/generic_error.html',
                                   error_message='Error. Please try later.')
        # limit how many records we pull back
        params['rows'] = request.values.get('numRecs', config.MAX_EXPORTS['skymap'])
        # only the bibcode field is needed for the skymap
        params.update({
            'facets': [],
            'fields': ['bibcode'],
            'highlights': [],
        })
        solr_req = solr.create_request(**params)
        target_url = None
        if 'bigquery' in request.values:
            from adsabs.core.solr import bigquery
            bigquery.prepare_bigquery_request(solr_req, request.values['bigquery'])
            target_url = config.SOLRBIGQUERY_URL
        solr_req = solr.set_defaults(solr_req, query_url=target_url)
        solr_resp = solr.get_response(solr_req)
        if solr_resp.is_error():
            return render_template('errors/generic_error.html',
                                   error_message='Error while creating the objects skymap. Please try later.')
        bibcodes = [doc.bibcode for doc in solr_resp.get_docset_objects()]
    statsd.incr("visualization.alladin_lite.viewed")
    return render_template('alladin_lite_embedded.html', bibcodes={'bibcodes': bibcodes})
def export_to_other_formats():
    """
    view that exports a set of papers
    the imput is a format and a list of bibcodes or a variable containing
    the parameters for a solr query
    """
    #extract the format
    export_format = request.values.getlist('export_format')
    list_type = request.values.get('list_type')
    numRecs = request.values.get('numRecs')
    #list of bibcodes to extract
    bibcodes_to_export = []
    #flag to check if everything has been extracted
    all_extracted = True
    num_hits = None
    #if there are not bibcodes, there should be first a query to extract them
    if not request.values.has_key('bibcode'):
        try:
            query_components = json.loads(request.values.get('current_search_parameters'))
        except (TypeError, JSONDecodeError):
            #@todo: logging of the error
            return render_template('errors/generic_error.html',
                                   error_message='Error while exporting records (code #1). Please try later.')
        #update the query parameters to return only what is necessary
        query_components.update({'facets': [], 'fields': ['bibcode'], 'highlights': [], 'rows': str(numRecs)})
        if 'sort' not in query_components:
            # this might be an abstract citation/reference list view so get the sort from config
            if list_type is not None and list_type in config.ABS_SORT_OPTIONS_MAP:
                query_components['sort'] = [config.ABS_SORT_OPTIONS_MAP[list_type]]
        #execute the query
        if list_type == 'similar':
            resp = get_document_similar(**query_components)
        else:
            req = solr.create_request(**query_components)
            url = None
            if 'bigquery' in request.values:
                from adsabs.core.solr import bigquery
                bigquery.prepare_bigquery_request(req, request.values['bigquery'])
                url = config.SOLRBIGQUERY_URL
            req = solr.set_defaults(req, query_url=url)
            resp = solr.get_response(req)
        if resp.is_error():
            return render_template('errors/generic_error.html',
                                   error_message='Error while exporting records (code #2). Please try later.')
        #extract the bibcodes
        for doc in resp.get_docset_objects():
            bibcodes_to_export.append(doc.bibcode)
        # BUGFIX: numRecs comes from the request as a *string*; in Python 2
        # an int compared to a str is always False for "int > str", so the
        # "Exported first N of M" truncation notice could never trigger.
        # Compare against the numeric value instead.
        if numRecs is not None and resp.get_hits() > int(numRecs):
            all_extracted = False
            num_hits = resp.get_hits()
    else:
        #extract all the bibcodes
        bibcodes_to_export = request.values.getlist('bibcode')
    #actually export the records
    if bibcodes_to_export:
        export_str = get_classic_records_export(bibcodes_to_export, export_format)
    else:
        export_str = ''
    #if not everything has been extracted, show message on top
    if not all_extracted:
        export_str = 'Exported first %s results of %s total. \n\n\n%s' % (numRecs, num_hits, export_str)
    else:
        export_str = 'Exported %s records \n\n\n%s' % (len(bibcodes_to_export), export_str)
    return Response(export_str, mimetype='text/plain')
try: query_components = json.loads(request.values.get('current_search_parameters')) except (TypeError, JSONDecodeError), e: #@todo: logging of the error return '' #update the query parameters to return only what is necessary query_components.update({'facets':[], 'fields': ['bibcode'], 'highlights':[], 'rows': numRecs}) if 'sort' not in query_components: query_components['sort'] = create_sort_param(list_type=list_type) #execute the query if list_type == 'similar': resp = get_document_similar(**query_components) else: req = solr.create_request(**query_components) url = None if 'bigquery' in request.values: from adsabs.core.solr import bigquery bigquery.prepare_bigquery_request(req, request.values['bigquery']) url = config.SOLRBIGQUERY_URL req = solr.set_defaults(req, query_url=url) resp = solr.get_response(req) if resp.is_error(): #@todo: logging of the error return '' #extract the bibcodes for doc in resp.get_docset_objects(): bibcodes_to_export.append(doc.bibcode) return ';'.join(bibcodes_to_export)