def search_rest(request):
    if request.method != 'POST':
        raise Http404
    f = search_form(request.POST)
    if f.is_valid():
        item = f.cleaned_data['item']
        # search for titles
        titles = search_titles_akas(item)
        # search for people
        q = utils.get_query(item, ['namesort'])
        people = [x for x in models.Person.objects.filter(q)]
        #q = utils.get_query(item, ['filename'])
        #files = [x for x in models.File.objects.filter(q)]
        files = search_file(item)
        if not request.user.is_anonymous():
            # also match titles the current user tagged with the search term
            q = utils.get_query(item, ['tag'])
            tagtits = [x.title for x in models.UserTitle.objects.filter(
                user=request.user,
                tag__in=[x.id for x in models.Tag.objects.filter(q)])]
            titles = titles + tagtits
        data = {'titles': set(titles), 'people': set(people), 'files': set(files)}
    else:
        data = {'titles': None, 'people': None, 'files': None}
    return data
def __search_std(item):
    from django.db.models import Q
    item, episode, season = __get_season_episode(item)
    q = utils.get_query(item, ['title', 'titlesort'])
    titles = [x for x in models.Title.objects.filter(q)]
    q = utils.get_query(item, ['akatitle'])
    titles = set([x.title for x in models.Aka.objects.filter(q)] + titles)
    if episode and season:
        q = (Q(id__in=[x.title_id for x in models.Relation.objects.filter(
                tvseason=season, tvepisode=episode, parent__in=titles)]) |
             Q(id__in=[x.title_id for x in models.Relation.objects.filter(
                tvseason=season, tvepisode=episode, title__in=titles)]))
        titles = [x for x in models.Title.objects.filter(q)]
    elif season:  # no episode but season
        q = (Q(id__in=[x.title_id for x in models.Relation.objects.filter(
                tvseason=season, parent__in=titles)]) |
             Q(id__in=[x.title_id for x in models.Relation.objects.filter(
                tvseason=season, title__in=titles)]))
        titles = [x for x in models.Title.objects.filter(q)]
    elif episode:  # no season in search
        q = (Q(id__in=[x.title_id for x in models.Relation.objects.filter(
                tvepisode=episode, parent__in=titles)]) |
             Q(id__in=[x.title_id for x in models.Relation.objects.filter(
                tvepisode=episode, title__in=titles)]))
        titles = [x for x in models.Title.objects.filter(q)]
    return list(titles)
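# ``__search_std`` above relies on a ``__get_season_episode`` helper that is not
# shown in this listing. A minimal sketch of one plausible implementation is given
# below, assuming the helper strips a trailing "s<season>e<episode>" marker from
# the search string and returns the remaining text plus the two numbers; the real
# helper may parse the query differently.
import re

def __get_season_episode(item):
    episode = season = None
    m = re.search(r'\bs(\d{1,2})\s*e(\d{1,2})\s*$', item, re.IGNORECASE)
    if m:
        season = int(m.group(1))
        episode = int(m.group(2))
        item = item[:m.start()].strip()
    return item, episode, season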
def __search_parent(item):
    from django.db.models import Q
    q = utils.get_query(item, ['title'])
    titles = [x for x in models.Title.objects.filter(q)]
    q = utils.get_query(item, ['akatitle'])
    titles = set([x.title for x in models.Aka.objects.filter(q)] + titles)
    q = Q(id__in=[x.title_id for x in models.Relation.objects.filter(parent__in=titles)])
    titles = [x for x in models.Title.objects.filter(q)]
    return list(titles)
def search_filter_paginate(entity_name, all_objects, request):
    # search
    search_string = ""
    items_search = all_objects
    i = None
    if 'search' in request.GET and request.GET['search'].strip() != '':
        search_string = request.GET['search'].strip()
        if entity_name == 'group':
            i = get_query(request.GET['search'].strip(), ['slug', 'description'])
        if entity_name == 'member':
            i = get_query(request.GET['search'].strip(), ['first_name', 'last_name', 'email'])
        if entity_name == 'discussion':
            i = get_query(request.GET['search'].strip(), ['name', 'description'])
        if entity_name == 'story':
            i = get_query(request.GET['search'].strip(), ['title', 'content'])
        items_search = all_objects.filter(i)

    # filter
    if entity_name == 'group':
        f = GroupProfileFilter(request.GET, queryset=items_search)
    if entity_name == 'member':
        f = MemberProfileFilter(request.GET, queryset=items_search)
    if entity_name == 'discussion':
        f = DiscussionFilter(request.GET, queryset=items_search)
    if entity_name == 'story':
        f = StoryFilter(request.GET, queryset=items_search)

    # pagination
    items_list = f.qs
    paginator = Paginator(items_list, 20)  # Show 20 items per page

    # Make sure page request is an int. If not, deliver first page.
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    # If page request (9999) is out of range, deliver last page of results.
    try:
        my_items = paginator.page(page)
    except (EmptyPage, InvalidPage):
        my_items = paginator.page(paginator.num_pages)

    get_parameters = analyze_filters_parameters(entity_name, request)
    if 'search' in request.GET:
        if get_parameters == "?":
            get_parameters = "?search=%s" % request.GET['search']
        else:
            get_parameters = "%s&search=%s" % (get_parameters, request.GET['search'])

    return (my_items, get_parameters, f)
def web_search(request):
    data = request.GET
    keyword = data["q"]
    try:
        page_num = data['page_num']
    except KeyError:
        page_num = 1
    tagval = keyword.strip()
    tagval = str(tagval).split(" ")
    entry_query = get_query(
        keyword.strip(),
        ['name', 'description', 'isbn_number', 'available__name'])
    posts = Product.objects.filter(
        entry_query | Q(tags__name__in=tagval)).distinct().order_by('id').reverse()
    posts, total_pages = paginate(posts, page_num)
    return render(
        request, 'search.html', {
            'data': posts.object_list,
            'total_pages': total_pages,
            'current_page': int(page_num),
            'next_page': int(page_num) + 1,
            'previous_page': int(page_num) - 1,
            'q': keyword
        })
def test_get_query():
    query_string = "#december -is:retweet -is:quote lang:en"
    nb_tweets = 10
    jjmin = -2
    jjmax = 1
    tweet_fields = "created_at,public_metrics,author_id"
    twitter_expected_query = {
        "query": query_string,
        "max_results": nb_tweets,
        # "start_time": "2020-12-01T00:00:00Z",
        # "end_time": "2020-12-07T09:24:00Z",
        "tweet.fields": tweet_fields
    }
    generated_query = json.loads(
        get_query(query_string, jjmin, jjmax, tweet_fields, nb_tweets=nb_tweets))
    generated_fields = list(generated_query.keys())
    for query_field in twitter_expected_query:
        assert twitter_expected_query[query_field] == generated_query[query_field]
    # test start_time and end_time fields (we should also test their values)
    # assertIn("start_time", generated_fields)
    # assertIn("end_time", generated_fields)
    assert "start_time" in generated_fields
    assert "end_time" in generated_fields
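# The ``get_query`` exercised by the test above is not shown in this listing. A
# hedged sketch, assuming it serializes a Twitter API v2 search payload to JSON
# with ``start_time``/``end_time`` derived from day offsets ``jjmin``/``jjmax``,
# could look like this; the key names come from the test, the offset semantics
# and everything else are assumptions.
import json
from datetime import datetime, timedelta, timezone

def get_query(query_string, jjmin, jjmax, tweet_fields, nb_tweets=10):
    now = datetime.now(timezone.utc)
    fmt = "%Y-%m-%dT%H:%M:%SZ"
    return json.dumps({
        "query": query_string,
        "max_results": nb_tweets,
        "start_time": (now + timedelta(days=jjmin)).strftime(fmt),
        "end_time": (now + timedelta(days=jjmax)).strftime(fmt),
        "tweet.fields": tweet_fields,
    })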
def search(self, request, *args, **kwargs):
    #TODO: enable pagination
    if 'entry_queryset' in kwargs:
        queryset = kwargs['entry_queryset']
        del kwargs['entry_queryset']
    else:
        queryset = self.entry_queryset
    query_string = ''
    found_entries = None
    if ('q' in request.GET) and request.GET['q'].strip():
        query_string = request.GET['q']
        entry_query = get_query(query_string, ['title', 'body', ])
        found_entries = queryset.filter(entry_query)
    context = {
        'query_string': query_string,
        'found_entries': found_entries
    }
    if 'extra_context' in kwargs:
        context.update(kwargs['extra_context'] or {})
    return render_to_response('%s/search_results.html' % self.template_root_path,
                              context,
                              context_instance=RequestContext(request))
def __search(request, obj_klass, template_name, field_set, range_date_set=[]):
    form_klass = obj_klass.__name__ + "SearchForm"
    constructor_frm = globals()[form_klass]
    if request.method == 'POST':
        nothing_exists = True
        results = obj_klass.objects.all()
        if results:
            nothing_exists = False
            for field_name, search_type in field_set:
                v = request.POST.get(field_name, None)
                if v:
                    if search_type == SearchField.EXACT_MATCH:
                        results = results.filter(**{field_name: v})
                    elif search_type == SearchField.QUERY:
                        query = get_query(v, [field_name, ])
                        results = results.filter(query)
                    elif search_type == SearchField.DATE_RANGE:
                        pass
                    if not results:
                        break
            if results:
                for field_name in range_date_set:
                    start = request.POST.get(field_name + '_start', None)
                    end = request.POST.get(field_name + '_end', None)
                    if start:
                        results = results.filter(**{field_name + '__gte': start})
                    if end:
                        results = results.filter(**{field_name + '__lte': end})
                    if not results:
                        break
        return render_to_response(template_name,
                                  {'form': constructor_frm(request.POST),
                                   'nothing_exists': nothing_exists,
                                   'method': request.method,
                                   'objects_tolist': results})
    else:
        return render_to_response(template_name, {'form': constructor_frm()})
def get(self):
    news_api = NewsApiClient(api_key=NEWS_API_KEY)  # Create the News API client
    q = get_query()  # Get the search query from the request
    headlines = news_api.get_top_headlines(q=q)  # Fetch the top headlines for the query
    # Marshal the articles so the input fields are transformed to match the
    # expected output fields.
    resource_fields = get_resource_fields()
    data = jsonify(marshal(headlines['articles'], resource_fields))
    return data
def search(request):
    found_entries = []
    if ('q' in request.GET) and request.GET['q'].strip():
        # get_query performs the search on two fields (name and category)
        entry_query = get_query(request.GET['q'], ['name', 'category', ])
        found_entries = Product.objects.filter(entry_query)
    found_entries = serializers.serialize("json", found_entries)
    #print JSONResponse(found_entries)
    return JSONResponse(found_entries)
def search(request):
    query_string = ''
    found_entries = None
    data = {}
    if ('q' in request.POST) and request.POST['q'].strip():
        query_string = request.POST['q']
        entry_query = get_query(query_string, ['title', 'author', 'description'])
        found_entries = Books.objects.filter(entry_query)
    return render_to_response('search_results.html', {
        'query_string': query_string,
        'found_entries': found_entries
    })
def list_comments(request, return_raw_comments=False):
    terms = request.GET.get("terms", "")
    for_user = request.GET.get("foruser", "")
    page_title = "Latest comments on projects"
    filter_description = ""
    use_filter_description = False
    comments = Comment.objects.all().order_by("-pub_date")

    # For RSS feeds
    if return_raw_comments:
        return page_title, comments

    if for_user:
        for_user_obj = get_object_or_404(User, username=for_user)
        filter_description += "<li>user: %s</li>" % for_user
        page_title = "Latest comments for user '%s'" % for_user
        comments = comments.filter(author=for_user_obj)

    if terms:
        page_title = "Searching latest comments for '%s'" % terms
        use_filter_description = True
        query = get_query(terms, ["text"])
        comments = comments.filter(query)

    comments = comments.order_by("-pub_date")
    paginated_comments = get_paginator_page(request, comments, PROJECTS_PER_PAGE)

    return render_to_response(
        "projects/comment_list.html",
        {
            "page_title": page_title,
            "filter_description": use_filter_description and filter_description or None,
            "search_results_type": terms and "comments" or "",
            "search_terms": terms and terms or "",
            "paginated_comments": paginated_comments,
        },
        context_instance=RequestContext(request),
    )
def get(self):
    d = list()
    reddit = praw.Reddit(
        client_id=REDDIT_CLIENT_ID,
        client_secret=REDDIT_CLIENT_SECRET,
        user_agent=REDDIT_USER_AGENT)  # Get connected to reddit.
    q = get_query()  # Get the search query from the request
    resource_fields = get_resource_fields(
        source='reddit')  # Map the resource fields to expected output fields.
    # Choose the call to make depending on whether a search query is present or not.
    news = reddit.subreddit('news').search(
        q, sort='new', limit=10) if q else reddit.subreddit('news').new(limit=10)
    for n in news:
        data = json.dumps(marshal(n, resource_fields))  # Marshal every news object.
        d.append(json.loads(data))  # Add the object to the final list of news.
    return d
def search(self, request, entry_queryset=None, extra_context=None):
    if entry_queryset is not None:
        queryset = entry_queryset
    else:
        queryset = self.entry_queryset
    query_string = ''
    found_entries = None
    if ('q' in request.GET) and request.GET['q'].strip():
        query_string = request.GET['q']
        entry_query = get_query(query_string, ['title', 'body', ])
        found_entries = queryset.filter(entry_query)
    context = {
        'query_string': query_string,
        'found_entries': found_entries
    }
    context.update(extra_context or {})
    return render_to_response('%s/search_results.html' % self.template_root_path,
                              context,
                              context_instance=RequestContext(request))
def index(request):
    product_list = Product.objects.all().filter(active=True).order_by('price')
    query_string = ''
    found_entries = None
    searched = False
    isStoreStaff = False
    message = ''
    for product in product_list:
        if product.stock_quantity < 50:
            message += "Stock quantity of {0} is low. Only {1} items remain in stock.\n".format(
                product.name, product.stock_quantity)
    if request.user.is_staff:
        isStoreStaff = True
    if ('q' in request.GET) and request.GET['q'].strip():
        searched = True
        query_string = request.GET['q']
        sort_by = request.GET['sort_by']
        product_list = Product.objects.all().filter(active=True).order_by(sort_by)
        entry_query = get_query(query_string, ['name', 'description'])
        found_entries = Product.objects.filter(entry_query).order_by(sort_by)
        return render_to_response('onlineStore/index.html',
                                  {'isStoreStaff': isStoreStaff,
                                   'searched': searched,
                                   'product_list': product_list,
                                   'query_string': query_string,
                                   'found_entries': found_entries},
                                  context_instance=RequestContext(request))
    if request.method == "GET":
        sort_by = request.GET.get('sort_by', 'price')
        product_list = Product.objects.all().filter(active=True).order_by(sort_by)
        messages.add_message(request, messages.INFO, message)
        return render_to_response('onlineStore/index.html',
                                  {'isStoreStaff': isStoreStaff,
                                   'searched': searched,
                                   'product_list': product_list,
                                   'query_string': query_string,
                                   'found_entries': found_entries},
                                  context_instance=RequestContext(request))
    return render_to_response('onlineStore/index.html',
                              {'product_list': product_list,
                               'searched': searched,
                               'query_string': query_string,
                               'found_entries': found_entries,
                               'isStoreStaff': isStoreStaff},
                              context_instance=RequestContext(request))
def search_render():
    fluid = this.props.fluid
    return e(ui.Search,
             size=this.props.size,
             input=e(ui.Input,
                     fluid=this.props.fluid,
                     placeholder="Search title, artist, namespace & tags",
                     label=e(
                         ui.Popup,
                         e(SearchOptions),
                         trigger=e(ui.Label,
                                   e(ui.Icon, js_name="options"),
                                   "Search Options",
                                   as_="a"),
                         hoverable=True,
                         on="click",
                         hideOnScroll=True,
                     )),
             fluid=True,
             icon=e(ui.Icon, js_name="search", link=True),
             className=this.props.className,
             onSearchChange=this.on_search_change,
             defaultValue=utils.get_query("search", ""))
def search(terms):
    query = get_query(terms, ['text', ])
    return Comment.objects.filter(query)
def search(terms):
    query = get_query(terms, ['title', 'description_markdown', ])
    return Project.objects.filter(query)
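# Most of the Django snippets in this listing call a ``get_query(query_string,
# search_fields)`` helper that is not reproduced here. A minimal sketch of the
# commonly used pattern (split the search string into terms, OR the fields per
# term, AND the terms together) is shown below; the actual helper in each project
# may differ.
import re
from django.db.models import Q

def normalize_query(query_string,
                    findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
                    normspace=re.compile(r'\s{2,}').sub):
    # Split the query string into keywords, keeping quoted phrases together.
    return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]

def get_query(query_string, search_fields):
    # Require every term to match at least one of the given fields (icontains).
    query = None
    for term in normalize_query(query_string):
        or_query = None
        for field_name in search_fields:
            q = Q(**{"%s__icontains" % field_name: term})
            or_query = q if or_query is None else (or_query | q)
        query = or_query if query is None else (query & or_query)
    return query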
def show_table(input_value):
    if input_value:
        return generate_table(query_db(get_query(input_value)))
def show_query(input_value):
    return get_query(input_value)
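# ``show_table`` and ``show_query`` above look like Plotly Dash callback bodies.
# A hedged sketch of how they might be wired to a layout is shown below; the
# component ids ("sql-input", "query-preview", "results-table") and the layout
# itself are hypothetical and only illustrate the callback registration.
from dash import Dash, dcc, html
from dash.dependencies import Input, Output

app = Dash(__name__)
app.layout = html.Div([
    dcc.Input(id="sql-input", type="text"),
    html.Pre(id="query-preview"),
    html.Div(id="results-table"),
])

# Register the existing functions as callbacks on the assumed input component.
app.callback(Output("query-preview", "children"),
             [Input("sql-input", "value")])(show_query)
app.callback(Output("results-table", "children"),
             [Input("sql-input", "value")])(show_table)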
        tertiary=this.props.tertiary,
        container=this.props.container,
        item_count=this.state.item_count,
        limit=limit,
        page=this.state.page,
        set_page=this.set_page,
        label=this.props.label,
    )


ItemView = createReactClass({
    'displayName': 'ItemView',
    'getInitialState': lambda: {
        'page': int(utils.get_query("page", 1)) or 1,
        'search_query': utils.get_query("search", "") or this.props.search_query,
        'infinitescroll': False,
        'default_limit': 30,
        'items': [],
        "element": None,
        "loading": True,
        "item_count": 1,
    },
    'get_items_count': get_items_count,
    'get_items': get_items,
    'get_element': get_element,
              label="Series"),
        )),
        e(ui.Grid.Row,
          e(ui.Grid.Column,
            e(items.ItemView,
              item_id=item_id,
              item_type=ItemType.Gallery,
              related_type=ItemType.Page,
              label="Pages",
              container=True,
              secondary=True))),
        stackable=True,
        container=True
    )


GalleryPage = createReactClass({
    'displayName': 'GalleryPage',
    'getInitialState': lambda: {'id': int(utils.get_query("id", 0)),
                                'data': this.props.data,
                                'tag_data': this.props.tag_data or {},
                                'lang_data': this.props.lang_data or {},
                                'status_data': this.props.status_data or {},
                                'group_data': this.props.group_data or [],
                                'item_type': ItemType.Gallery,
                                'loading': True,
                                'loading_group': True,
                                },
    'get_item': get_item,
    'get_grouping': get_grouping,
    'get_tags': get_tags,
    'get_lang': get_lang,
    'get_status': get_status,
    200,
    'optimizer': 'adam',
    'adam_lr': 1e-3,
    'l2_regularization': 0,
    'test_size': 500,
    'GNNStep': 3
}

train_data = get_train_data(config['train_path'], config['attack_types'])
valid_data = get_query(config['valid_query_path'])
test_data = get_query(config['test_query_path'])
sample_generator = SampleGenerator(config, train_data, valid_data, test_data)
engine = ProcedureEngine(config)

for epoch in range(config['num_epoch']):
    print('Epoch {} starts!'.format(epoch))
    print('_' * 80)
    #engine.train_an_epoch(sample_generator, epoch)
    #val_f1 = engine.evaluate(sample_generator, epoch)
    #engine.save(config['alias'], epoch, val_f1)
    engine.get_result(sample_generator, epoch)
def list_projects(request, list_type="top", is_completed=None, return_raw_projects=False):
    user = request.user
    tags = request.GET.get("tags", "")
    for_user = request.GET.get("foruser", "")
    terms = request.GET.get("terms", "")
    projects = None
    page_title = "Project list"
    filter_description = ""

    # Filter by tags (comes first since we use TaggedItem.objects.get_by_model)
    if list_type == "recommend":
        profile = Profile.objects.get(user=user)
        user_tags = profile.get_tags()
        # get all projects matching >=1 of the user's tags
        projects = TaggedItem.objects.get_union_by_model(Project, user_tags)
    elif tags != "":
        projects = TaggedItem.objects.get_by_model(Project, tags)
        filter_description += "<li>tags: %s</li>" % tags
    # or select a first crude set of results to be filtered
    else:
        projects = Project.objects.all()

    # Filter by completeness
    if is_completed is not None:
        if is_completed:
            page_title = "Completed projects"
            projects = projects.filter(p_completed=True, wont_be_completed=False)
        else:
            page_title = "Proposed projects"
            projects = projects.filter(p_completed=False, wont_be_completed=False)

    # Filter by search terms
    if terms != "":
        page_title = "Search results for '%s'" % terms
        query = get_query(terms, ["title", "description_markdown"])
        projects = projects.filter(query)

    # Filter by user
    if for_user != "":
        filter_description += "<li>user: %s</li>" % for_user
        for_user_obj = get_object_or_404(User, username=for_user)
        projects = projects.filter(author=for_user_obj)

    # Prepare query string given filters, for link URLs
    qs = ""
    qs_dict = {}
    if tags:
        qs_dict["tags"] = tags
    if for_user:
        qs_dict["foruser"] = for_user
    if terms:
        qs_dict["terms"] = terms
    if qs_dict:
        qs = "?" + urllib.urlencode(qs_dict)

    top_url = "/projects/" + (is_completed and "completed" or "proposed") + "/top/" + qs
    new_url = "/projects/" + (is_completed and "completed" or "proposed") + "/new/" + qs
    mytags_url = "/projects/" + (is_completed and "completed" or "proposed") + "/recommend/" + qs
    this_page_url = None

    # Order results
    page_url = ""
    rss_url = ""
    if list_type == "new":
        this_page_url = new_url
        rss_url = "/projects/rss/" + (is_completed and "completed" or "proposed") + "/new/" + qs
        projects = projects.order_by("-pub_date")
    elif list_type == "recommend":
        this_page_url = mytags_url
    else:
        rss_url = "/projects/rss/" + (is_completed and "completed" or "proposed") + "/top/" + qs
        this_page_url = top_url
        if is_completed:
            projects = projects.order_by("-score_completed")
        else:
            projects = projects.order_by("-score_proposed")

    # For RSS feeds
    if return_raw_projects:
        return page_title, this_page_url, projects, list_type

    list_paginator_page = get_paginator_page(request, projects, PROJECTS_PER_PAGE)

    return render_to_response(
        "projects/project_list.html",
        {
            "project_list_page": list_paginator_page,
            "page_title": page_title,
            "list_type": list_type,
            "filter_description": filter_description,
            # TODO: also include tags in those urls
            "list_top_url": top_url,
            "list_new_url": new_url,
            "search_results_type": terms and "projects" or "",
            "search_terms": terms and terms or "",
            "rss_url": rss_url,
            "list_mytags_url": mytags_url,
        },
        context_instance=RequestContext(request),
    )
from datetime import datetime, timedelta

# current time
now = datetime.now()
before = now - timedelta(hours=12)
# current time in epoch format
to = utils.get_date(date=now)
# time 12 hours ago in epoch format
frm = utils.get_date(date=now, th=12)

# get query for reconnected devices
device = ['Device_IPv4', 'Device_IPv6']
avg = []
node_avg = []
node_based_avg = {}
files = []
for ip in device:
    query = utils.get_query(frm=frm, to=to, ip=ip)
    # Get data from elasticsearch
    result = utils.get_details(index="prod_push_cep-*", body=query)
    # fetch details and store them in a list
    result = utils.fetch_hits(result=result, ip=ip)
    data = utils.sort_data(result)
    # x = utils.sort_node_based(data)
    # print x
    filename = '/var/www/html/Report/' + ip + '_' + datetime.now().strftime("%Y%m%d-%H%M") + '.csv'
    header1 = ["UAID", "IP", "Host"]
    temp = 1
    for v in data.itervalues():
        if len(v) > temp:
            temp = len(v)
def search_file(searchstr):
    if not searchstr:
        return []
    q = utils.get_query(searchstr, ['filename', 'path'])
    return [x for x in models.File.objects.filter(valid=True).filter(q).order_by("filename")]