def search(bot, update):
    """Handle a text search command: look up songs and reply with results.

    When no query text follows the command, a usage hint is sent back.
    Otherwise the handler replies with a text list of matches plus an
    inline keyboard whose buttons carry each result's URL as callback data.
    """
    try:
        text = update.message.text
        query = helpers.get_query(text)
        if not query:
            # No search terms after the command: show usage help.
            msg = "Use: %s" % srch_msg
            return bot.send_message(
                # BUG FIX: the original used update.callback_query, which is
                # None for a plain text message (this handler reads
                # update.message.text); reply to the message's own chat.
                chat_id=update.message.chat_id,
                text=msg)
        results = helpers.search_songs(query)
        # Build the textual result list with join() instead of += in a loop.
        text = "".join(
            "%s - %s\n" % (res["title"], helpers.youtube_url % res["url"])
            for res in results)
        # One button per result; callback data is the video URL fragment.
        button_list = [
            InlineKeyboardButton(res["title"], callback_data=res["url"])
            for res in results
        ]
        reply_markup = InlineKeyboardMarkup(
            helpers.build_menu(button_list, n_cols=3))
        bot.send_message(chat_id=update.message.chat_id,
                         text=text,
                         reply_markup=reply_markup)
    except Exception:
        # Best-effort handler: never crash the bot loop on a bad update,
        # but don't swallow SystemExit/KeyboardInterrupt like bare `except:`.
        pass
def entity_list(request):
    """Render the paginated entity list, optionally filtered by category
    and free-text query.

    AJAX requests receive only the results partial with caching disabled;
    regular requests get the full page including the category list.
    """
    qs = Entity.objects.all()

    cat = request.GET.get('cat', None)
    if cat:
        qs = qs.filter(categories__pk=cat)

    query_string = ''
    raw_q = request.GET.get('q', '')
    if raw_q.strip():
        query_string = raw_q
        entry_query = helpers.get_query(
            query_string, ['name', 'description', 'short_description'])
        if entry_query:
            qs = qs.filter(entry_query)

    page = request.GET.get('page')
    qs = helpers.paginate(qs, page, elems_perpage=6)

    context = {
        'ajax_url': reverse('entity_list'),
        'query_string': query_string,
        'entities': qs,
        'page': page,
    }

    if request.is_ajax():
        response = render(request, 'entity/search_results.html', context)
        response['Cache-Control'] = 'no-cache'
        response['Vary'] = 'Accept'
        return response

    context['categories'] = Category.objects.all()
    return render(request, 'entity/list.html', context)
def profile_list(request):
    """Paginated person directory with optional free-text search.

    AJAX requests receive only the results fragment with caching disabled;
    normal requests get the full list page.
    """
    queryset = Person.objects.all()

    query_string = ''
    search_term = request.GET.get('q', '')
    if search_term.strip():
        query_string = search_term
        entry_query = helpers.get_query(query_string,
                                        ['name', 'surname', 'email'])
        if entry_query:
            queryset = queryset.filter(entry_query)

    page = request.GET.get('page')
    queryset = helpers.paginate(queryset, page, elems_perpage=10)

    context = {
        'ajax_url': reverse('profile_list'),
        'query_string': query_string,
        'profiles': queryset,
        'page': page,
    }

    if not request.is_ajax():
        return render(request, 'profile/list.html', context)

    response = render(request, 'profile/profile_query.html', context)
    response['Cache-Control'] = 'no-cache'
    response['Vary'] = 'Accept'
    return response
def activity_list(request):
    """List activities with optional text search and pagination.

    Serves a cache-disabled results partial for AJAX requests and the
    full list page otherwise.
    """
    matched = Activity.objects.all()

    query_string = ''
    if request.GET.get('q', '').strip():
        query_string = request.GET['q']
        search_filter = helpers.get_query(
            query_string, ['name', 'description', 'type_activity'])
        if search_filter:
            matched = matched.filter(search_filter)

    page = request.GET.get('page')
    matched = helpers.paginate(matched, page, elems_perpage=15)

    params = {
        'ajax_url': reverse('activity_list'),
        'query_string': query_string,
        'activities': matched,
        'page': page,
    }

    if not request.is_ajax():
        return render(request, 'activity/list.html', params)

    response = render(request, 'activity/search_results.html', params)
    response['Cache-Control'] = 'no-cache'
    response['Vary'] = 'Accept'
    return response
def news_list(request):
    """Paginated news listing with optional free-text filter.

    AJAX requests get the results partial with caching disabled.
    """
    items = News.objects.all()

    query_string = ''
    if ('q' in request.GET) and request.GET['q'].strip():
        query_string = request.GET['q']
        filter_q = helpers.get_query(
            query_string, ['name', 'description', 'short_description'])
        if filter_q:
            items = items.filter(filter_q)

    page = request.GET.get('page')
    items = helpers.paginate(items, page, elems_perpage=10)

    params = dict(
        ajax_url=reverse('news_list'),
        query_string=query_string,
        news=items,
        page=page,
    )

    if request.is_ajax():
        response = render(request, 'news/search_results.html', params)
        response['Cache-Control'] = 'no-cache'
        response['Vary'] = 'Accept'
        return response
    return render(request, 'news/list.html', params)
def wallet_search(request):
    """Search wallets by owner (entity name, person name, or username).

    Mirrors the other list views: optional 'q' filter, pagination
    (12 per page), and a cache-disabled AJAX partial.
    """
    wallets = Wallet.objects.all().select_related('user')

    query_string = ''
    if ('q' in request.GET) and request.GET['q'].strip():
        # NOTE: unlike the sibling views, this one stores the stripped query.
        query_string = request.GET['q'].strip()
        entry_query = helpers.get_query(
            query_string,
            ['user__entity__name', 'user__person__name', 'user__username'])
        if entry_query:
            wallets = wallets.filter(entry_query)

    page = request.GET.get('page')
    wallets = helpers.paginate(wallets, page, elems_perpage=12)

    params = {
        'ajax_url': reverse('wallet_search'),
        'query_string': query_string,
        'wallets': wallets,
        # CONSISTENCY FIX: every sibling list view exposes 'page' to its
        # templates; this view omitted it (additive, backward-compatible).
        'page': page,
    }
    if request.is_ajax():
        response = render(request, 'wallets/wallets_query.html', params)
        response['Cache-Control'] = 'no-cache'
        response['Vary'] = 'Accept'
        return response
    else:
        return render(request, 'wallets/wallets_list.html', params)
def reroll():
    """Replace a single meal in the current user's plan with a new one."""
    user_id = get_user()

    # Remove the meal the user asked to reroll.
    meal_id = request.form.get("reroll")
    database.del_meal(meal_id, user_id)

    # Fetch stored preferences and pick a replacement meal.
    intolerances = database.get_intolerances(user_id)
    diet = database.get_diet(user_id)
    replacement = get_meal(get_query(diet), diet, intolerances)

    # Store the new meal and go back to the menu.
    database.update_menu(replacement, user_id)
    return redirect("/menu")
def new_meal_plan():
    """Discard the user's meal plan and generate five fresh meals."""
    user_id = get_user()

    # Drop the previous plan before rebuilding.
    database.del_meal_plan(user_id)

    # Look up the user's stored preferences.
    diet = database.get_diet(user_id)
    intolerances = database.get_intolerances(user_id)

    # Generate five meals and persist each one.
    for _ in range(5):
        meal = get_meal(get_query(diet), diet, intolerances)
        database.update_menu(meal, user_id)

    return redirect("/menu")
def home():
    """Home page: on POST, save diet/intolerance preferences and build a
    fresh five-meal plan; on GET, render the preferences form.
    """
    # `global intolerances` removed: the variable is only read here, which
    # needs no global declaration.
    if request.method == "POST":
        user_id = get_user()

        # Delete the old recipes.
        database.del_meal_plan(user_id)

        # Collect the query options from the submitted form.
        diet = request.form.get("diet")
        allergy = [intolerance for intolerance in intolerances
                   if request.form.get(intolerance) == "true"]
        allergy = ",".join(allergy)

        # Persist intolerances and diet.
        update_preferences(allergy, diet)

        # Build a plan of 5 distinct meals.
        meals = []
        for _ in range(5):
            # BUG FIX: the original seeded `meal = str(meal)` (a one-char
            # string) just to force entry into the retry loop, and compared
            # with `== None`. Use a None sentinel with `is None` instead;
            # short-circuit evaluation guards the len() call.
            meal = None
            while meal is None or len(meal) <= 1 or meal in meals:
                query = get_query(diet)
                meal = get_meal(query, diet, allergy)
            meals.append(meal)
            database.update_menu(meal, user_id)

        return redirect("/menu")

    # GET: render the form with the available diets and intolerances.
    return render_template("home.html", diets=diets, intolerances=intolerances)
def entity_map(request):
    """Render the map of active entities, filtered by free-text query
    and/or city."""
    entities = Entity.objects.active()

    query_string = ''
    raw_q = request.GET.get('q', '')
    if raw_q.strip():
        query_string = raw_q
        entry_query = helpers.get_query(
            query_string, ['name', 'description', 'short_description'])
        if entry_query:
            entities = entities.filter(entry_query)

    # Optional exact-match city filter.
    city = request.GET.get('city', '')
    if city:
        entities = entities.filter(city=city)

    return render(request, 'entity/map.html', {
        'ajax_url': reverse('entity_list'),
        'query_string': query_string,
        'entities': entities,
    })
def search_users(request):
    """AJAX user search: filter by username/name/email, paginate
    (5 per page) and return the results partial with caching disabled."""
    matched = User.objects.all()

    query_string = ''
    if ('q' in request.GET) and request.GET['q'].strip():
        query_string = request.GET.get('q')
        entry_query = helpers.get_query(
            query_string,
            ['username', 'first_name', 'last_name', 'email'])
        if entry_query:
            matched = matched.filter(entry_query)

    page = request.GET.get('page')
    matched = helpers.paginate(matched, page, 5)

    response = render(request, 'profile/search_results.html', {
        'ajax_url': reverse('search_users'),
        'query_string': query_string,
        'users': matched,
        'page': page,
    })
    response['Cache-Control'] = 'no-cache'
    response['Vary'] = 'Accept'
    return response
def search(request):
    """Case search: free-text query plus GET-parameter refiners.

    Refiners: date (decision year or 'all'), issue (Clause pk or 'all'),
    outcome (Outcome pk or 'all'), and the multi-select parameters
    complainant_type / judgement / defendant / publication_type.
    The 'extra' flag keeps the extra-filter panel open in the template.
    """
    query_string = ''
    found = None
    if ('q' in request.GET) and request.GET['q'].strip():
        query_string = request.GET['q']
        entry_query = get_query(
            query_string,
            ['title', 'summary', 'complaint', 'defendants__name',
             'complainants__name', 'detail__content'])
        found = Case.objects.filter(entry_query)  # .order_by('-date_of_decision')
    else:
        query_string = ''
        found = Case.objects.all()

    class Refiner:
        """Simple (label, value) holder for one refinement-widget option."""
        def __init__(self, label, value):
            self.label = label
            self.value = value

    # --- refine by date ---
    date_refiner_list = [Refiner('All', 'all')]
    # BUG FIX: the original `range(1996, 2015).reverse()` only worked on
    # Python 2, where range() returned a list; a descending range is
    # correct on both 2 and 3.  TODO: get max year from DB!
    for year in range(2014, 1995, -1):
        date_refiner_list.append(Refiner(str(year), str(year)))
    date_refine = 'all'
    if ('date' in request.GET) and request.GET['date'].strip():
        date_refine = request.GET['date'].strip()
        if date_refine != 'all':
            found = found.filter(date_of_decision__year=int(date_refine))

    # --- refine by issue ---
    issue_refiner_list = [
        Refiner(c.prettyname, str(c.id))
        for c in Clause.objects.filter(parent=None).order_by('id')]
    issue_refiner_list.insert(0, Refiner('All', 'all'))
    issue_refine = 'all'
    if ('issue' in request.GET) and request.GET['issue'].strip():
        issue_refine = request.GET['issue'].strip()
        if issue_refine != 'all':
            found = found.filter(clauses__id=issue_refine)

    # --- refine by outcome ---
    outcome_refiner_list = [
        Refiner(o.name, str(o.id))
        for o in Outcome.objects.all().order_by('name')]
    outcome_refiner_list.insert(0, Refiner('All', 'all'))
    outcome_refine = 'all'
    if ('outcome' in request.GET) and request.GET['outcome'].strip():
        outcome_refine = request.GET['outcome'].strip()
        if outcome_refine != 'all':
            found = found.filter(outcome__id=outcome_refine)

    # --- refine by complainant_type (can select multiple) ---
    # TODO: switch the other search filters over to use this system
    complainant_type_refiners = [Refiner(choice[1], choice[0])
                                 for choice in Case.COMPLAINANT_TYPE_CHOICES]
    complainant_types = request.GET.getlist('complainant_type')
    if len(complainant_types) > 0:
        found = found.filter(complainant_type__in=complainant_types)

    # --- refine by judgement (can select multiple) ---
    judgement_refiners = [Refiner(choice[1], choice[0])
                          for choice in Case.JUDGEMENT_CHOICES]
    judgements = request.GET.getlist('judgement')
    if len(judgements) > 0:
        found = found.filter(judgement__in=judgements)

    # --- refine by defendant (can select multiple) ---
    # Loop variable renamed: the original shadowed the builtin `id`.
    defendants = [int(pk) for pk in request.GET.getlist('defendant')]
    # NOTE(review): the hard-coded year 2010 restricts refiner options to
    # entities with cases decided that year — looks stale; confirm intent.
    defendant_refiners = [
        Refiner(ent.name, ent.id)
        for ent in Entity.objects.filter(
            kind__exact='m',
            cases_as_defendant__date_of_decision__year=2010).distinct()]
    if len(defendants) > 0:
        found = found.filter(defendants__in=defendants)

    # --- refine by publication type (entity sub_kind) ---
    publication_type_refiners = [
        Refiner(choice[1], choice[0])
        for choice in Entity.ENTITY_PUBLICATION_TYPE_CHOICES]
    publication_types = request.GET.getlist('publication_type')
    if len(publication_types) > 0:
        found = found.filter(defendants__publication_type__in=publication_types)

    # Keep the extra-filters panel open when any of its filters are active.
    extra_filters = (request.GET.get('extra', False)
                     or len(defendants) > 0
                     or len(complainant_types) > 0
                     or len(judgements) > 0)

    return render_to_response('search.html', {
        'query_string': query_string,
        'case_list': found,
        'date_refine': date_refine,
        'date_refiner_list': date_refiner_list,
        'issue_refine': issue_refine,
        'issue_refiner_list': issue_refiner_list,
        'outcome_refine': outcome_refine,
        'outcome_refiner_list': outcome_refiner_list,
        'complainant_types': complainant_types,
        'complainant_type_refiners': complainant_type_refiners,
        'publication_types': publication_types,
        'publication_type_refiners': publication_type_refiners,
        'judgements': judgements,
        'judgement_refiners': judgement_refiners,
        'defendants': defendants,
        'defendant_refiners': defendant_refiners,
        'extra_filters': extra_filters,
    }, context_instance=RequestContext(request))
"""A python web scraper for searching Amazon""" import os import csv from bs4 import BeautifulSoup from selenium import webdriver import helpers # Get Query from User query = helpers.get_query() # Setup webdriver using Google Chrome chromedriver = os.environ.get("CHROMEDRIVER") driver = webdriver.Chrome(chromedriver) # Search 20 pages records = [] for page in range(1, 21): # Open page url = helpers.create_url(query, str(page)) driver.get(url) # Extract page data soup = BeautifulSoup(driver.page_source, 'html.parser') results = soup.find_all('div', {'data-component-type': 's-search-result'}) # Extract records from data page_records = list( filter(None, [helpers.extract_item_data(item) for item in results]))