def transfer_backlog(request):
    """Render the transfer backlog search page.

    Searches the 'transfers' Elasticsearch index for transfers (or, in file
    mode, individual transfer files) whose status is 'backlog', pages the
    results, and renders the backlog search template.

    NOTE: all locals are intentionally kept — the template receives
    ``locals()`` and reads many of them (e.g. checked_if_in_file_mode,
    search_params, file_extension_usage, end, previous_page, next_page).
    """
    # deal with transfer mode: any non-empty 'mode' GET param switches to
    # per-file results instead of per-transfer facets
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(
        request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.ingest.views.transfer_backlog',
            query='', field='', type='')

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(
        request)

    # set paging variables
    items_per_page = 20 if file_mode else 10
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1  # 1-based offset of first result

    # perform search
    conn = elasticSearchFunctions.connect_and_create_index('transfers')
    try:
        query = advanced_search.assemble_query(
            queries, ops, fields, types,
            must_haves=[pyes.TermQuery('status', 'backlog')])
        if not file_mode:
            # use all results to pull transfer facets if not in file mode
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
            )
        else:
            # otherwise use paged results
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
                start=start - 1,
                size=items_per_page)
    except Exception:
        return HttpResponse('Error accessing index.')

    # take note of facet data
    file_extension_usage = results['facets']['fileExtension']['terms']
    transfer_uuids = results['facets']['sipuuid']['terms']

    if not file_mode:
        # run through transfers to see if they've been created yet
        awaiting_creation = {}
        for transfer_instance in transfer_uuids:
            try:
                awaiting_creation[transfer_instance.term] = \
                    transfer_awaiting_sip_creation_v2(transfer_instance.term)
                transfer = models.Transfer.objects.get(
                    uuid=transfer_instance.term)
                transfer_basename = os.path.basename(
                    transfer.currentlocation[:-1])
                # strip trailing "-<uuid>" (36 chars + dash) from the name
                transfer_instance.name = transfer_basename[:-37]
                transfer_instance.type = transfer.type
                if transfer.accessionid is not None:
                    transfer_instance.accession = transfer.accessionid
                else:
                    transfer_instance.accession = ''
            except Exception:
                # best-effort: any failure marks the transfer as not awaiting
                # SIP creation rather than aborting the whole page
                awaiting_creation[transfer_instance.term] = False

        # page data
        number_of_results = len(transfer_uuids)
        page_data = helpers.pager(transfer_uuids, items_per_page, page + 1)
        transfer_uuids = page_data['objects']
    else:
        # page data
        number_of_results = results.hits.total
        results = transfer_backlog_augment_search_results(results)

    # set remaining paging variables
    end, previous_page, next_page = \
        advanced_search.paging_related_values_for_template_use(
            items_per_page, page, start, number_of_results)

    # make sure results is set (guards against it being undefined)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = StorageSearchForm(initial={'query': queries[0]})

    return render(request, 'ingest/backlog/search.html', locals())
def transfer_backlog(request):
    """Render the transfer backlog search page.

    Queries the 'transfers' Elasticsearch index for backlogged transfers
    (or individual files when 'mode' is set in the query string), pages the
    results, and renders the backlog search template.

    NOTE: the template is rendered with ``locals()``, so every local defined
    here (checked_if_in_file_mode, search_params, file_extension_usage,
    end, previous_page, next_page, ...) is part of the template contract
    and must not be removed.
    """
    # deal with transfer mode
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.ingest.views.transfer_backlog',
            query='', field='', type=''
        )

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(request)

    # set paging variables
    items_per_page = 20 if file_mode else 10
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1  # 1-based offset of the first hit

    # perform search
    conn = elasticSearchFunctions.connect_and_create_index('transfers')
    try:
        query = advanced_search.assemble_query(
            queries, ops, fields, types,
            must_haves=[pyes.TermQuery('status', 'backlog')]
        )
        # use all results to pull transfer facets if not in file mode
        if not file_mode:
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
            )
        else:
            # otherwise use paged results
            results = conn.search_raw(
                query,
                indices='transfers',
                type='transferfile',
                start=start - 1,
                size=items_per_page
            )
    except Exception:
        return HttpResponse('Error accessing index.')

    # take note of facet data
    file_extension_usage = results['facets']['fileExtension']['terms']
    transfer_uuids = results['facets']['sipuuid']['terms']

    if not file_mode:
        # run through transfers to see if they've been created yet
        awaiting_creation = {}
        for transfer_instance in transfer_uuids:
            try:
                awaiting_creation[transfer_instance.term] = \
                    transfer_awaiting_sip_creation_v2(transfer_instance.term)
                transfer = models.Transfer.objects.get(uuid=transfer_instance.term)
                transfer_basename = os.path.basename(transfer.currentlocation[:-1])
                # drop the trailing "-<uuid>" suffix (37 chars) from the name
                transfer_instance.name = transfer_basename[:-37]
                transfer_instance.type = transfer.type
                if transfer.accessionid is not None:
                    transfer_instance.accession = transfer.accessionid
                else:
                    transfer_instance.accession = ''
            except Exception:
                # best-effort per transfer: failures flag it as not awaiting
                # SIP creation instead of breaking the page
                awaiting_creation[transfer_instance.term] = False

        # page data
        number_of_results = len(transfer_uuids)
        page_data = helpers.pager(transfer_uuids, items_per_page, page + 1)
        transfer_uuids = page_data['objects']
    else:
        # page data
        number_of_results = results.hits.total
        results = transfer_backlog_augment_search_results(results)

    # set remaining paging variables
    end, previous_page, next_page = \
        advanced_search.paging_related_values_for_template_use(
            items_per_page, page, start, number_of_results
        )

    # make sure results is set (guard against it being undefined)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = StorageSearchForm(initial={'query': queries[0]})

    return render(request, 'ingest/backlog/search.html', locals())
def search(request):
    """Render the archival storage search page.

    Searches the 'aips' Elasticsearch index for AIPs (or, in file mode,
    individual AIP files), pages the results, and renders the archival
    storage search template.

    NOTE: the template receives ``locals()``, so locals such as
    checked_if_in_file_mode, search_params, end, previous_page and
    next_page are read by the template and must be kept.
    """
    # deal with transfer mode: any non-empty 'mode' GET param switches to
    # per-file results instead of per-AIP facets
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(
        request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.archival_storage.views.search',
            query='', field='', type='')

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(
        request)

    # set paging variables
    items_per_page = 20 if file_mode else 2
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1  # 1-based offset of first result

    # perform search
    conn = pyes.ES(elasticSearchFunctions.getElasticsearchServerHostAndPort())
    try:
        query = advanced_search.assemble_query(queries, ops, fields, types)
        # use all results to pull transfer facets if not in file mode
        # pulling only one field (we don't need field data as we augment
        # the results using separate queries)
        if not file_mode:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                fields='uuid')
        else:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                start=start - 1,
                size=items_per_page,
                fields='AIPUUID,filePath,FILEUUID')
    except Exception:
        return HttpResponse('Error accessing index.')

    # take note of facet data
    aip_uuids = results['facets']['AIPUUID']['terms']

    if not file_mode:
        number_of_results = len(aip_uuids)
        page_data = helpers.pager(aip_uuids, items_per_page, page + 1)
        aip_uuids = page_data['objects']
        search_augment_aip_results(conn, aip_uuids)
    else:
        number_of_results = results.hits.total
        results = search_augment_file_results(results)

    # set remaining paging variables
    end, previous_page, next_page = \
        advanced_search.paging_related_values_for_template_use(
            items_per_page, page, start, number_of_results)

    # make sure results is set (guards against it being undefined)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = forms.StorageSearchForm(initial={'query': queries[0]})

    return render(request, 'archival_storage/archival_storage_search.html',
                  locals())
def search(request):
    """Render the archival storage search page.

    Queries the 'aips' Elasticsearch index for AIPs (or individual AIP
    files when 'mode' is set in the query string), pages the results, and
    renders the archival storage search template.

    NOTE: the template is rendered with ``locals()``, so every local
    defined here (checked_if_in_file_mode, search_params, end,
    previous_page, next_page, ...) is part of the template contract and
    must not be removed.
    """
    # deal with transfer mode
    file_mode = False
    checked_if_in_file_mode = ''
    if request.GET.get('mode', '') != '':
        file_mode = True
        checked_if_in_file_mode = 'checked'

    # get search parameters from request
    queries, ops, fields, types = advanced_search.search_parameter_prep(request)

    # redirect if no search params have been set
    if 'query' not in request.GET:
        return helpers.redirect_with_get_params(
            'components.archival_storage.views.search',
            query='', field='', type=''
        )

    # get string of URL parameters that should be passed along when paging
    search_params = advanced_search.extract_url_search_params_from_request(request)

    # set paging variables
    items_per_page = 20 if file_mode else 2
    page = advanced_search.extract_page_number_from_url(request)
    start = page * items_per_page + 1  # 1-based offset of the first hit

    # perform search
    conn = pyes.ES(elasticSearchFunctions.getElasticsearchServerHostAndPort())
    try:
        query = advanced_search.assemble_query(queries, ops, fields, types)
        # use all results to pull transfer facets if not in file mode
        # pulling only one field (we don't need field data as we augment
        # the results using separate queries)
        if not file_mode:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                fields='uuid'
            )
        else:
            results = conn.search_raw(
                query=query,
                indices='aips',
                type='aipfile',
                start=start - 1,
                size=items_per_page,
                fields='AIPUUID,filePath,FILEUUID'
            )
    except Exception:
        return HttpResponse('Error accessing index.')

    # take note of facet data
    aip_uuids = results['facets']['AIPUUID']['terms']

    if not file_mode:
        number_of_results = len(aip_uuids)
        page_data = helpers.pager(aip_uuids, items_per_page, page + 1)
        aip_uuids = page_data['objects']
        search_augment_aip_results(conn, aip_uuids)
    else:
        number_of_results = results.hits.total
        results = search_augment_file_results(results)

    # set remaining paging variables
    end, previous_page, next_page = \
        advanced_search.paging_related_values_for_template_use(
            items_per_page, page, start, number_of_results
        )

    # make sure results is set (guard against it being undefined)
    try:
        if results:
            pass
    except NameError:
        results = False

    form = forms.StorageSearchForm(initial={'query': queries[0]})

    return render(request, 'archival_storage/archival_storage_search.html',
                  locals())