def transfer_backlog(request):
    """Render the transfer backlog search page.

    Searches the 'transfers' Elasticsearch index for files whose transfer
    has status 'backlog'.  Without a ``mode`` GET parameter the view lists
    transfers (built from the ``sipuuid`` facet); with it, individual files.
    The template context is ``locals()``, so every local below is exposed
    to 'ingest/backlog/search.html'.

    FIX: replaced bare ``except:`` clauses (which also swallow
    ``KeyboardInterrupt``/``SystemExit``) with ``except Exception:`` /
    ``except NameError:``, ``!= None`` with ``is not None``, and
    ``not 'query' in`` with ``'query' not in``.
    """
    # deal with transfer mode
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.ingest.views.transfer_backlog',
            query='', field='', type='')

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(request)

    # set paging variables
    if not file_mode:
        items_per_page = 10
    else:
        items_per_page = 20
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1

    # perform search
    conn = elasticSearchFunctions.connect_and_create_index('transfers')
    try:
        # restrict results to transfers sitting in the backlog
        query = advanced_search.assemble_query(
            queries, ops, fields, types,
            must_haves=[pyes.TermQuery('status', 'backlog')])
        # use all results to pull transfer facets if not in file mode
        if not file_mode:
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
            )
        else:
            # otherwise use paged results
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
                start=start - 1,
                size=items_per_page)
    except Exception:
        # was a bare ``except:``; keep the best-effort behaviour but do not
        # swallow SystemExit/KeyboardInterrupt
        return HttpResponse('Error accessing index.')

    # take note of facet data (file_extension_usage is surfaced to the
    # template via locals())
    file_extension_usage = results['facets']['fileExtension']['terms']
    transfer_uuids = results['facets']['sipuuid']['terms']

    if not file_mode:
        # run through transfers to see if they've been created yet
        awaiting_creation = {}
        for transfer_instance in transfer_uuids:
            try:
                awaiting_creation[transfer_instance.term] = transfer_awaiting_sip_creation_v2(transfer_instance.term)
                transfer = models.Transfer.objects.get(uuid=transfer_instance.term)
                # currentlocation ends with a slash; strip it before basename
                transfer_basename = os.path.basename(transfer.currentlocation[:-1])
                # drop the "-<uuid>" suffix (36 chars + separator)
                transfer_instance.name = transfer_basename[:-37]
                transfer_instance.type = transfer.type
                if transfer.accessionid is not None:
                    transfer_instance.accession = transfer.accessionid
                else:
                    transfer_instance.accession = ''
            except Exception:
                # best-effort: a transfer missing from the DB just shows as
                # not yet awaiting creation
                awaiting_creation[transfer_instance.term] = False

        # page data
        number_of_results = len(transfer_uuids)
        page_data = helpers.pager(transfer_uuids, items_per_page, page + 1)
        transfer_uuids = page_data['objects']
    else:
        # page data
        number_of_results = results.hits.total
        results = transfer_backlog_augment_search_results(results)

    # set remaining paging variables
    end, previous_page, next_page = advanced_search.paging_related_values_for_template_use(
        items_per_page, page, start, number_of_results)

    # make sure results is set (NameError can only occur if a code path
    # above ever stops assigning it)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = StorageSearchForm(initial={'query': queries[0]})
    return render(request, 'ingest/backlog/search.html', locals())
def search(request):
    """Render the archival storage (AIP) search page using pyes.

    Without a ``mode`` GET parameter, lists AIPs built from the ``AIPUUID``
    facet; with it, lists individual AIP files.  The template context is
    ``locals()``, so every local below is exposed to
    'archival_storage/archival_storage_search.html'.

    FIX: replaced bare ``except:`` clauses with ``except Exception:`` /
    ``except NameError:`` and ``not 'query' in`` with ``'query' not in``.
    """
    # deal with transfer mode
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.archival_storage.views.search',
            query='', field='', type='')

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(request)

    # set paging variables
    # NOTE(review): 2 items per page in AIP mode looks like a leftover debug
    # value — confirm intent before changing
    if not file_mode:
        items_per_page = 2
    else:
        items_per_page = 20
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1

    # perform search
    conn = pyes.ES(elasticSearchFunctions.getElasticsearchServerHostAndPort())
    try:
        query = advanced_search.assemble_query(queries, ops, fields, types)
        # use all results to pull transfer facets if not in file mode
        # pulling only one field (we don't need field data as we augment
        # the results using separate queries)
        if not file_mode:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                fields='uuid')
        else:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                start=start - 1,
                size=items_per_page,
                fields='AIPUUID,filePath,FILEUUID')
    except Exception:
        # was a bare ``except:``; keep the best-effort behaviour but do not
        # swallow SystemExit/KeyboardInterrupt
        return HttpResponse('Error accessing index.')

    # take note of facet data
    aip_uuids = results['facets']['AIPUUID']['terms']
    if not file_mode:
        number_of_results = len(aip_uuids)
        page_data = helpers.pager(aip_uuids, items_per_page, page + 1)
        aip_uuids = page_data['objects']
        search_augment_aip_results(conn, aip_uuids)
    else:
        number_of_results = results.hits.total
        results = search_augment_file_results(results)

    # set remaining paging variables
    end, previous_page, next_page = advanced_search.paging_related_values_for_template_use(
        items_per_page, page, start, number_of_results)

    # make sure results is set (NameError can only occur if a code path
    # above ever stops assigning it)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = forms.StorageSearchForm(initial={'query': queries[0]})
    return render(request, 'archival_storage/archival_storage_search.html', locals())
def transfer_backlog(request):
    """Render the transfer backlog search page.

    Searches the 'transfers' Elasticsearch index for files whose transfer
    has status 'backlog'.  Without a ``mode`` GET parameter the view lists
    transfers (built from the ``sipuuid`` facet); with it, individual files.
    The template context is ``locals()``, so every local below is exposed
    to 'ingest/backlog/search.html'.

    FIX: replaced bare ``except:`` clauses (which also swallow
    ``KeyboardInterrupt``/``SystemExit``) with ``except Exception:`` /
    ``except NameError:``, ``!= None`` with ``is not None``, and
    ``not 'query' in`` with ``'query' not in``.
    """
    # deal with transfer mode
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.ingest.views.transfer_backlog',
            query='', field='', type=''
        )

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(request)

    # set paging variables
    if not file_mode:
        items_per_page = 10
    else:
        items_per_page = 20
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1

    # perform search
    conn = elasticSearchFunctions.connect_and_create_index('transfers')
    try:
        # restrict results to transfers sitting in the backlog
        query = advanced_search.assemble_query(
            queries, ops, fields, types,
            must_haves=[pyes.TermQuery('status', 'backlog')]
        )
        # use all results to pull transfer facets if not in file mode
        if not file_mode:
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
            )
        else:
            # otherwise use paged results
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
                start=start - 1,
                size=items_per_page
            )
    except Exception:
        # was a bare ``except:``; keep the best-effort behaviour but do not
        # swallow SystemExit/KeyboardInterrupt
        return HttpResponse('Error accessing index.')

    # take note of facet data (file_extension_usage is surfaced to the
    # template via locals())
    file_extension_usage = results['facets']['fileExtension']['terms']
    transfer_uuids = results['facets']['sipuuid']['terms']

    if not file_mode:
        # run through transfers to see if they've been created yet
        awaiting_creation = {}
        for transfer_instance in transfer_uuids:
            try:
                awaiting_creation[transfer_instance.term] = transfer_awaiting_sip_creation_v2(transfer_instance.term)
                transfer = models.Transfer.objects.get(uuid=transfer_instance.term)
                # currentlocation ends with a slash; strip it before basename
                transfer_basename = os.path.basename(transfer.currentlocation[:-1])
                # drop the "-<uuid>" suffix (36 chars + separator)
                transfer_instance.name = transfer_basename[:-37]
                transfer_instance.type = transfer.type
                if transfer.accessionid is not None:
                    transfer_instance.accession = transfer.accessionid
                else:
                    transfer_instance.accession = ''
            except Exception:
                # best-effort: a transfer missing from the DB just shows as
                # not yet awaiting creation
                awaiting_creation[transfer_instance.term] = False

        # page data
        number_of_results = len(transfer_uuids)
        page_data = helpers.pager(transfer_uuids, items_per_page, page + 1)
        transfer_uuids = page_data['objects']
    else:
        # page data
        number_of_results = results.hits.total
        results = transfer_backlog_augment_search_results(results)

    # set remaining paging variables
    end, previous_page, next_page = advanced_search.paging_related_values_for_template_use(
        items_per_page, page, start, number_of_results
    )

    # make sure results is set (NameError can only occur if a code path
    # above ever stops assigning it)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = StorageSearchForm(initial={'query': queries[0]})
    return render(request, 'ingest/backlog/search.html', locals())
def search(request):
    """Search the archival storage index for AIPs or for AIP files.

    The ``filemode`` checkbox selects between an AIP listing (aggregated
    from their indexed files) and a paged listing of individual files.
    Results are fetched lazily, one page at a time, via LazyPagedSequence.
    """
    # FIXME there has to be a better way of handling checkboxes than parsing
    # them by hand here, and displaying 'checked' in
    # _archival_storage_search_form.html
    truthy_values = ('checked', 'yes', 'true', 'on')

    # Parse checkbox for file mode.
    if request.GET.get('filemode', '') in truthy_values:
        file_mode = True
        checked_if_in_file_mode = 'checked'
        items_per_page = 20
    else:
        # AIP list
        file_mode = False
        checked_if_in_file_mode = ''
        items_per_page = 10

    # Parse checkbox for show AICs.
    show_aics = 'checked' if request.GET.get('show_aics', '') in truthy_values else ''

    # Pull the search parameters out of the request.
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)
    logger.debug('Queries: %s, Ops: %s, Fields: %s, Types: %s', queries, ops, fields, types)

    # Redirect if no search params have been set.
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.archival_storage.views.search',
            query='', field='', type=''
        )

    # URL parameters that should be passed along when paging.
    search_params = advanced_search.extract_url_search_params_from_request(request)

    page_number = int(request.GET.get('page', 1))

    # Perform the search.
    es_client = elasticSearchFunctions.get_client()
    results = None
    query = advanced_search.assemble_query(es_client, queries, ops, fields, types, search_index='aips', doc_type='aipfile')
    try:
        if not file_mode:
            # Fetch all unique AIP UUIDs in the returned set of files; only
            # the aggregation is wanted, so suppress document hits.
            query['aggs'] = {'aip_uuids': {'terms': {'field': 'AIPUUID', 'size': 0}}}
            query['size'] = 0
            # Searching for AIPs still actually searches type 'aipfile', and
            # returns the UUID of the AIP the files are a part of. To search
            # for an attribute of an AIP, the aipfile must index that
            # information about their AIP in
            # elasticSearchFunctions.index_mets_file_metadata
            results = es_client.search(
                body=query,
                index='aips',
                doc_type='aipfile',
                sort='sipName:desc',
            )
            # Given these AIP UUIDs, fetch the actual information wanted
            # from aips/aip.
            aip_buckets = results['aggregations']['aip_uuids']['buckets']
            aip_uuids = [bucket['key'] for bucket in aip_buckets]
            uuid_file_counts = {bucket['key']: bucket['doc_count'] for bucket in aip_buckets}
            query = {'query': {'terms': {'uuid': aip_uuids}}}
            index = 'aips'
            doc_type = 'aip'
            fields = 'name,uuid,size,created,status,AICID,isPartOf,countAIPsinAIC,encrypted'
            sort = 'name:desc'
        else:
            index = 'aips'
            doc_type = 'aipfile'
            fields = 'AIPUUID,filePath,FILEUUID,encrypted'
            sort = 'sipName:desc'

        # To reduce the amount of data fetched from ES, pages are pulled
        # on demand through a LazyPagedSequence.
        def es_pager(page, page_size):
            """Fetch one page of normalized entries from Elasticsearch.

            :param page: 1-indexed page to fetch
            :param page_size: Number of entries on a page
            :return: List of dicts for each entry with additional information
            """
            offset = (page - 1) * page_size
            raw = es_client.search(
                body=query,
                from_=offset,
                size=page_size,
                index=index,
                doc_type=doc_type,
                fields=fields,
                sort=sort,
            )
            if file_mode:
                return search_augment_file_results(es_client, raw)
            return search_augment_aip_results(raw, uuid_file_counts)

        count = es_client.count(index=index, doc_type=doc_type, body={'query': query['query']})['count']
        results = LazyPagedSequence(es_pager, items_per_page, count)
    except ElasticsearchException:
        logger.exception('Error accessing index.')
        return HttpResponse('Error accessing index.')

    # The AIC creation form only applies when listing AIPs.
    aic_creation_form = forms.CreateAICForm(initial={'results': aip_uuids}) if not file_mode else None

    page_data = helpers.pager(results, items_per_page, page_number)
    return render(request, 'archival_storage/search.html',
                  {
                      'file_mode': file_mode,
                      'show_aics': show_aics,
                      'checked_if_in_file_mode': checked_if_in_file_mode,
                      'aic_creation_form': aic_creation_form,
                      'results': page_data.object_list,
                      'search_params': search_params,
                      'page': page_data,
                  }
                  )
def search(request):
    """Render the archival storage (AIP) search page using pyes.

    Without a ``mode`` GET parameter, lists AIPs built from the ``AIPUUID``
    facet; with it, lists individual AIP files.  The template context is
    ``locals()``, so every local below is exposed to
    'archival_storage/archival_storage_search.html'.

    FIX: replaced bare ``except:`` clauses with ``except Exception:`` /
    ``except NameError:`` and ``not 'query' in`` with ``'query' not in``.
    """
    # deal with transfer mode
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.archival_storage.views.search',
            query='', field='', type=''
        )

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(request)

    # set paging variables
    # NOTE(review): 2 items per page in AIP mode looks like a leftover debug
    # value — confirm intent before changing
    if not file_mode:
        items_per_page = 2
    else:
        items_per_page = 20
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1

    # perform search
    conn = pyes.ES(elasticSearchFunctions.getElasticsearchServerHostAndPort())
    try:
        query = advanced_search.assemble_query(queries, ops, fields, types)
        # use all results to pull transfer facets if not in file mode
        # pulling only one field (we don't need field data as we augment
        # the results using separate queries)
        if not file_mode:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                fields='uuid'
            )
        else:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                start=start - 1,
                size=items_per_page,
                fields='AIPUUID,filePath,FILEUUID'
            )
    except Exception:
        # was a bare ``except:``; keep the best-effort behaviour but do not
        # swallow SystemExit/KeyboardInterrupt
        return HttpResponse('Error accessing index.')

    # take note of facet data
    aip_uuids = results['facets']['AIPUUID']['terms']
    if not file_mode:
        number_of_results = len(aip_uuids)
        page_data = helpers.pager(aip_uuids, items_per_page, page + 1)
        aip_uuids = page_data['objects']
        search_augment_aip_results(conn, aip_uuids)
    else:
        number_of_results = results.hits.total
        results = search_augment_file_results(results)

    # set remaining paging variables
    end, previous_page, next_page = advanced_search.paging_related_values_for_template_use(
        items_per_page, page, start, number_of_results
    )

    # make sure results is set (NameError can only occur if a code path
    # above ever stops assigning it)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = forms.StorageSearchForm(initial={'query': queries[0]})
    return render(request, 'archival_storage/archival_storage_search.html', locals())
def search(request):
    """Search the archival storage index for AIPs or for AIP files.

    The ``filemode`` checkbox selects between an AIP listing (aggregated
    from their indexed files) and a paged listing of individual files.
    Results are fetched lazily, one page at a time, via LazyPagedSequence.
    """
    # FIXME there has to be a better way of handling checkboxes than parsing
    # them by hand here, and displaying 'checked' in
    # _archival_storage_search_form.html
    truthy = ("checked", "yes", "true", "on")

    # Parse checkbox for file mode.
    if request.GET.get("filemode", "") in truthy:
        file_mode = True
        checked_if_in_file_mode = "checked"
        items_per_page = 20
    else:
        # AIP list
        file_mode = False
        checked_if_in_file_mode = ""
        items_per_page = 10

    # Parse checkbox for show AICs.
    show_aics = "checked" if request.GET.get("show_aics", "") in truthy else ""

    # Pull the search parameters out of the request.
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)
    logger.debug("Queries: %s, Ops: %s, Fields: %s, Types: %s", queries, ops, fields, types)

    # Redirect if no search params have been set.
    if "query" not in request.GET:
        return helpers.redirect_with_get_params(
            "components.archival_storage.views.search",
            query="", field="", type="")

    # URL parameters that should be passed along when paging.
    search_params = advanced_search.extract_url_search_params_from_request(request)

    page_number = int(request.GET.get("page", 1))

    # Perform the search.
    es_client = elasticSearchFunctions.get_client()
    results = None
    query = advanced_search.assemble_query(queries, ops, fields, types)
    try:
        if not file_mode:
            # Fetch all unique AIP UUIDs in the returned set of files.
            # ES limits terms aggregations to 10 buckets by default; the
            # size parameter overrides that.
            # TODO: Use composite aggregation when it gets out of beta.
            query["aggs"] = {
                "aip_uuids": {"terms": {"field": "AIPUUID", "size": "10000"}}
            }
            # Don't return documents, just the aggregation.
            query["size"] = 0
            # Searching for AIPs still actually searches type 'aipfile', and
            # returns the UUID of the AIP the files are a part of. To search
            # for an attribute of an AIP, the aipfile must index that
            # information about their AIP.
            results = es_client.search(body=query, index="aipfiles")
            # Given these AIP UUIDs, fetch the actual information wanted
            # from the aips index.
            agg_buckets = results["aggregations"]["aip_uuids"]["buckets"]
            uuids = [bucket["key"] for bucket in agg_buckets]
            uuid_file_counts = {
                bucket["key"]: bucket["doc_count"] for bucket in agg_buckets
            }
            query = {"query": {"terms": {"uuid": uuids}}}
            index = "aips"
            fields = (
                "name,uuid,size,created,status,AICID,isPartOf,countAIPsinAIC,encrypted"
            )
            sort = "name.raw:desc"
        else:
            index = "aipfiles"
            fields = "AIPUUID,filePath,FILEUUID,encrypted"
            sort = "sipName.raw:desc"

        # To reduce the amount of data fetched from ES, pages are pulled
        # on demand through a LazyPagedSequence.
        def es_pager(page, page_size):
            """Fetch one page of normalized aipfile entries from Elasticsearch.

            :param page: 1-indexed page to fetch
            :param page_size: Number of entries on a page
            :return: List of dicts for each entry with additional information
            """
            offset = (page - 1) * page_size
            raw = es_client.search(
                body=query,
                from_=offset,
                size=page_size,
                index=index,
                _source=fields,
                sort=sort,
            )
            if file_mode:
                return search_augment_file_results(es_client, raw)
            return search_augment_aip_results(raw, uuid_file_counts)

        count = es_client.count(index=index, body={"query": query["query"]})["count"]
        results = LazyPagedSequence(es_pager, items_per_page, count)
    except ElasticsearchException:
        logger.exception("Error accessing index.")
        return HttpResponse("Error accessing index.")

    # The AIC creation form only applies when listing AIPs.
    aic_creation_form = (
        forms.CreateAICForm(initial={"results": uuids}) if not file_mode else None
    )

    page_data = helpers.pager(results, items_per_page, page_number)
    return render(
        request,
        "archival_storage/search.html",
        {
            "file_mode": file_mode,
            "show_aics": show_aics,
            "checked_if_in_file_mode": checked_if_in_file_mode,
            "aic_creation_form": aic_creation_form,
            "results": page_data.object_list,
            "search_params": search_params,
            "page": page_data,
        },
    )