Example #1
0
class Search(search_views.SearchView):
    """Search view combining category, product and page entities."""

    # Field used when redirecting on an exact product match.
    redirect_field = 'vendor_code'

    # ignore CPDBear
    search_entities = [
        search_engine.Search(
            qs=Category.objects.active(),
            name='category',
            fields=['name'],  # Ignore CPDBear
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Product.objects.active(),
            name='product',
            redirect_field='vendor_code',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Page.objects.filter(is_active=True).exclude(
                type=Page.MODEL_TYPE),
            name='page',  # Ignore CPDBear
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
    ]

    def get_redirect_search_entity(self):
        """Return the search entity whose match triggers a redirect."""
        return next(filter(lambda entity: entity.name == 'product',
                           self.search_entities))
Example #2
0
class Search(search_views.SearchView):
    """Search view combining category, product and page entities."""

    # Field used when redirecting on an exact product match.
    redirect_field = 'vendor_code'

    # ignore CPDBear
    search_entities = [
        search_engine.Search(
            qs=Category.objects.all(),
            name='category',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Product.objects.all(),
            name='product',
            redirect_field='vendor_code',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=ExcludedModelTPage.objects.all(),
            name='page',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
    ]

    def get_redirect_search_entity(self):
        """Return the search entity whose match triggers a redirect."""
        return next(filter(lambda entity: entity.name == 'product',
                           self.search_entities))
Example #3
0
class Autocomplete(search_views.AutocompleteView):
    """Autocomplete view over categories, products and pages."""

    see_all_label = settings.SEARCH_SEE_ALL_LABEL

    # ignore CPDBear
    search_entities = [
        search_engine.Search(
            qs=Category.objects.filter(page__is_active=True),
            name='category',
            fields=['name', 'id'],
            template_fields=['name', 'url'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Product.objects.active(),
            name='product',
            fields=['name', 'id', 'vendor_code'],
            template_fields=['name', 'price', 'url'],  # Ignore CPDBear
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Page.objects.filter(is_active=True).exclude(
                type=Page.MODEL_TYPE),
            name='pages',
            fields=['name'],
            template_fields=['name', 'url'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
    ]
Example #4
0
class Autocomplete(search_views.AutocompleteView):
    """Autocomplete view over categories, products and pages."""

    see_all_label = settings.SEARCH_SEE_ALL_LABEL

    # ignore CPDBear
    search_entities = [
        search_engine.Search(
            qs=Category.objects.all(),
            name='category',
            fields=['name', 'id'],
            template_fields=['name', 'url'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Product.objects.all(),
            name='product',
            fields=['name', 'id', 'vendor_code'],
            template_fields=['name', 'price', 'url'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=ExcludedModelTPage.objects.all(),
            name='pages',
            fields=['name'],
            template_fields=['name', 'url'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
    ]
Example #5
0
class MockSearchView(search.SearchView):
    """SearchView backed by the mock category/product models."""

    search_entities = [
        search_engine.Search(
            qs=MockCategory.objects.all(),
            name='category',
            fields=['name', 'id'],
        ),
        search_engine.Search(
            qs=MockProduct.objects.all(),
            name='product',
            fields=['name', 'id'],
        ),
    ]
Example #6
0
class MockAdminAutocompleteView(search.AdminAutocompleteView):
    """AdminAutocompleteView backed by the mock category/product models."""

    search_entities = [
        search_engine.Search(
            qs=MockCategory.objects.all(),
            name='category',
            fields=['name', 'id'],
            template_fields=['name', 'url'],
        ),
        search_engine.Search(
            qs=MockProduct.objects.all(),
            name='product',
            fields=['name', 'id'],
            template_fields=['name', 'price', 'url'],
        ),
    ]
Example #7
0
class AdminAutocomplete(search_views.AdminAutocompleteView):
    """Admin-side autocomplete over categories, products and pages."""

    # ignore CPDBear
    search_entities = [
        search_engine.Search(
            qs=Category.objects.all(),
            name='category',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Product.objects.all(),
            name='product',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=ExcludedModelTPage.objects.all(),
            name='pages',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
    ]
Example #8
0
class AdminAutocomplete(search_views.AdminAutocompleteView):
    """Admin-side autocomplete over active categories, products and pages."""

    # ignore CPDBear
    search_entities = [
        search_engine.Search(
            qs=Category.objects.filter(page__is_active=True),
            name='category',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Product.objects.active(),
            name='product',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
        search_engine.Search(
            qs=Page.objects.filter(is_active=True).exclude(type=Page.MODEL_TYPE),
            name='pages',
            fields=['name'],
            min_similarity=settings.TRIGRAM_MIN_SIMILARITY,
        ),
    ]
def _generate_results_for(city, week_start):
    """Run a full-event search around ``city`` for an 8-day window
    starting at ``week_start``.
    """
    window_end = week_start + datetime.timedelta(days=8)
    # Degenerate point bounds at the city center, expanded by the
    # nearby-distance radius.
    point = (city.latitude, city.longitude)
    city_bounds = math.expand_bounds((point, point), cities.NEARBY_DISTANCE_KM)
    query = search_base.SearchQuery(
        time_period=search_base.TIME_ALL_FUTURE,
        start_date=week_start,
        end_date=window_end,
        bounds=city_bounds)
    return search.Search(query).get_search_results(full_event=True)
Example #10
0
def promote_events_to_user(user):
    """Push notifications about newly-created upcoming events to one user.

    Skips users that can't receive android notifications, can't be
    re-fetched, have an expired oauth token, or have no stored location.
    Searches upcoming events near the user's location and notifies about
    those created within the last 24 hours.
    """
    # TODO: Adjust when we have iphone notifications
    if not android.can_notify(user):
        return

    logging.info("Promoting new events to user %s", user.fb_uid)
    # `user` is rebound to the canonical datastore entity below; keep the
    # id around so failure paths can still log it.
    fb_uid = user.fb_uid
    user = users.User.get_by_id(fb_uid)
    if not user:
        # Bug fix: previously dereferenced user.fb_uid here while user was
        # None, raising AttributeError instead of logging.
        logging.error("No user found: %s", fb_uid)
        return
    if user.expired_oauth_token:
        logging.info("User has expired token, aborting: %s", fb_uid)
        return

    user_location = user.location
    if not user_location:
        return
    distance_in_km = user.distance_in_km()
    min_attendees = user.min_attendees

    # search for relevant events
    geocode = gmaps_api.get_geocode(address=user_location)
    if not geocode:
        return
    bounds = math.expand_bounds(geocode.latlng_bounds(), distance_in_km)
    query = search_base.SearchQuery(time_period=search_base.TIME_UPCOMING,
                                    bounds=bounds,
                                    min_attendees=min_attendees)

    one_day_ago = time.mktime(
        (datetime.datetime.now() - datetime.timedelta(hours=24)).timetuple())

    search_query = search.Search(query)
    # creation_time is needed to filter down to newly-created events.
    search_query.extra_fields = ['creation_time']
    search_results = search_query._get_candidate_doc_events()
    # TODO: can we move this filter into the search query itself??
    recent_events = [
        x.doc_id for x in search_results
        if x.field('creation_time').value > one_day_ago
    ]

    logging.info("Found %s search_results, %s new events", len(search_results),
                 len(recent_events))
    for event_id in recent_events:
        if android.add_notify(user, event_id):
            logging.info("Sent notification!")
Example #11
0
    def get(self):
        """Render search results (optionally plus class results) as
        calendar-style JSON entries.
        """
        self.finish_preload()
        form = search_base.SearchForm(
            formdata=self.request.GET,
            data=self.user.dict_for_form() if self.user else None)
        if not form.validate():
            logging.warning("Form errors: %s", form.errors)
            self.write_json_response([])
            return
        search_query = form.build_query(start_end_query=True)
        search_results = search.Search(search_query).get_search_results()

        # Debug flag: merge in class results, re-sorted chronologically.
        if 'class' in form.deb.data:
            from classes import class_index
            search_results += class_index.ClassSearch(
                search_query).get_search_results()
            search_results.sort(key=lambda x: x.start_time)

        json_results = []
        for result in search_results:
            begins = result.start_time
            ends = result.fake_end_time
            span = ends - begins
            # Collapse events that run over five days or at most one day
            # down to a single point in time.
            if span > datetime.timedelta(days=5) or span <= datetime.timedelta(days=1):
                ends = begins
            json_results.append(
                dict(
                    id=result.event_id,
                    title='@ %s\n\n%s' % (result.actual_city_name, result.name),
                    start=begins.strftime('%Y-%m-%dT%H:%M:%SZ'),
                    end=ends.strftime('%Y-%m-%dT%H:%M:%SZ'),
                    url=urls.dd_event_url(result.event_id),
                    allDay=False,
                ))
        self.write_json_response(json_results)
    def get(self, name):
        """Render the react topic page for the topic whose url_path is ``name``.

        Looks up the topic, runs a keyword search over all events
        (prefiltered for on-topic relevance), and renders the results
        together with topic videos and instagram posts.
        """
        topics = topic_db.Topic.query(topic_db.Topic.url_path == name).fetch(1)
        if not topics:
            self.response.set_status(404)
            return

        topic = topics[0]
        topic.init()

        def prefilter(doc_event):
            """Function for filtering doc results, before we spend the energy to load the corresponding DBEvents.

            We only want on-topic events here:
            - Must contain keyword in the title
            - Must contain keyword on a line where it makes up >10% of the text (for judges, workshops, etc). We want to hide the resume-includes-classes-from-X people
            """
            logging.info("Prefiltering event %s", doc_event.doc_id)
            name = doc_event.field('name').value.lower()
            description = doc_event.field('description').value.lower()

            description_lines = description.split('\n')

            for keyword in topic.search_keywords:
                # NOTE(review): keyword is not re.escape()d — assumes topic
                # keywords contain no regex metacharacters; confirm upstream.
                keyword_word_re = re.compile(r'\b%s\b' % keyword)
                if keyword_word_re.search(name):
                    return True
                for line in description_lines:
                    result = keyword_word_re.search(line)
                    # If the keyword is more than 10% of the text in the line:
                    # Examples:
                    #   "- HOUSE - KAPELA (Serial Stepperz/Wanted Posse)"
                    #   "5th November : EVENT Judged by HIRO :"
                    if result:
                        if 1.0 * len(keyword) / len(line) > 0.1:
                            return True
                        else:
                            logging.info(
                                "Found keyword %r on line, but not long enough: %r",
                                keyword, line)

            logging.info("Prefilter dropping event %s with name: %r" %
                         (doc_event.doc_id, name))
            return False

        # Match any of the topic's keywords, each quoted as an exact phrase.
        keywords = ' OR '.join('"%s"' % x for x in topic.search_keywords)
        search_query = search_base.SearchQuery(keywords=keywords)
        # Need these fields for the prefilter
        search_query.extra_fields = ['name', 'description']
        # TODO: query needs to include the 'all time' bits somehow, so we can grab all events for our topic pages
        searcher = search.Search(search_query)
        searcher.search_index = search.AllEventsIndex
        search_results = searcher.get_search_results(prefilter=prefilter)

        json_search_response = api.build_search_results_api(
            None,
            search_query,
            search_results, (2, 0),
            need_full_event=False,
            geocode=None,
            distance=None)

        videos = get_videos_for(topic.youtube_query)

        if topic.social().get('instagram'):
            instagrams = get_instagrams_for(topic.social()['instagram'])
        else:
            # No instagram handle configured: render an empty feed.
            instagrams = {'items': []}

        topic_json = {
            'title': topic.title(),
            'description': topic.description(),
            'image_url': topic.image_url(),
            'social': topic.social(),
        }

        props = dict(
            response=json_search_response,
            videos=videos,
            instagrams=instagrams,
            topic=topic_json,
        )

        self.setup_react_template('topic.js', props)
        self.render_template('topic')
Example #13
0
    def get(self, name):
        """Render the server-templated topic page for the topic whose
        url_path is ``name``.

        Looks up the topic (optionally pulling title/image/description
        from its facebook page), runs a keyword search over events
        prefiltered for relevance, and groups results by date and by
        location for display.
        """
        topics = topic_db.Topic.query(topic_db.Topic.url_path == name).fetch(1)
        if not topics:
            self.response.set_status(404)
            return

        topic = topics[0]

        if topic.graph_id:
            # We shouldn't need any tokens to access pages
            fbl = fb_api.FBLookup(None, None)
            fb_source = fbl.get(topic_db.LookupTopicPage, topic.graph_id)
        else:
            fb_source = None

        def prefilter(doc_event):
            """Function for filtering doc results, before we spend the energy to load the corresponding DBEvents.

            We only want on-topic events here:
            - Must contain keyword in the title
            - Must contain keyword on a line where it makes up >10% of the text (for judges, workshops, etc). We want to hide the resume-includes-classes-from-X people
            """
            logging.info("Prefiltering event %s", doc_event.doc_id)
            name = doc_event.field('name').value.lower()
            description = doc_event.field('description').value.lower()

            description_lines = description.split('\n')

            for keyword in topic.search_keywords:
                # NOTE(review): keyword is not re.escape()d — assumes topic
                # keywords contain no regex metacharacters; confirm upstream.
                keyword_word_re = re.compile(r'\b%s\b' % keyword)
                if keyword_word_re.search(name):
                    return True
                for line in description_lines:
                    result = keyword_word_re.search(line)
                    # If the keyword is more than 10% of the text in the line:
                    # Examples:
                    #   "- HOUSE - KAPELA (Serial Stepperz/Wanted Posse)"
                    #   "5th November : EVENT Judged by HIRO :"
                    if result:
                        if 1.0 * len(keyword) / len(line) > 0.1:
                            return True
                        else:
                            logging.info(
                                "Found keyword %r on line, but not long enough: %r",
                                keyword, line)

            logging.info("Prefilter dropping event %s with name: %r" %
                         (doc_event.doc_id, name))
            return False

        # Match any of the topic's keywords, each quoted as an exact phrase.
        keywords = ' OR '.join('"%s"' % x for x in topic.search_keywords)
        search_query = search_base.SearchQuery(keywords=keywords)
        # Need these fields for the prefilter
        search_query.extra_fields = ['name', 'description']
        search_results = search.Search(search_query).get_search_results(
            prefilter=prefilter)

        # Topic overrides win; otherwise fall back to the facebook page data.
        self.display['topic_title'] = topic.override_title or (
            fb_source and fb_source['info']['name'])
        self.display['topic_image'] = topic.override_image or (
            fb_source and fb_source['picture']['data']['url'])
        self.display['topic_description'] = topic.override_description or (
            fb_source and fb_source['info'].get('about')) or ''

        self.display['all_results'] = search_results

        # (year, [(month, events), ...]) pairs in chronological order.
        by_year = []
        for year, month_events in sorted(
                grouping.group_results_by_date(search_results).items()):
            by_year.append((year, sorted(month_events.items())))
        self.display['group_by_date'] = by_year
        # Locations sorted by descending event count, then name.
        by_country = sorted(
            grouping.group_results_by_location(search_results).items(),
            key=lambda x: (-len(x[1]), x[0]))
        self.display['group_by_location'] = by_country

        # TODO:
        # show points on map (future and past?)
        # show future events
        # show past events
        # show high quality and low quality events (most viable with 'past')
        # have an ajax filter on the page that lets me filter by location?
        self.display['fb_page'] = fb_source

        self.render_template('topic')
Example #14
0
    def get(self):
        """API search endpoint: return events matching a location and/or keywords.

        Widens the search radius in steps (50→300 miles) until at least
        one result is found, then returns canonicalized event JSON plus
        onebox links, a title, and the bounding box searched.
        """
        data = {
            'location': self.request.get('location'),
            'keywords': self.request.get('keywords'),
            'locale': self.request.get('locale'),
        }
        # If it's 1.0 clients, or web clients, then grab all data
        if self.version == (1, 0):
            time_period = search_base.TIME_UPCOMING
        else:
            time_period = self.request.get('time_period')
        data['time_period'] = time_period
        form = search_base.SearchForm(data=data)

        if not form.validate():
            for field, errors in form.errors.items():
                for error in errors:
                    self.add_error(u"%s error: %s" %
                                   (getattr(form, field).label.text, error))

        if not form.location.data:
            city_name = None
            southwest = None
            northeast = None
            if not form.keywords.data:
                # Neither location nor keywords: 1.0 clients get an empty
                # success payload; newer clients get an error.
                if self.version == (1, 0):
                    self.write_json_success({'results': []})
                    return
                else:
                    self.add_error('Please enter a location or keywords')
        else:
            place = gmaps_api.fetch_place_as_json(query=form.location.data,
                                                  language=form.locale.data)
            if place['status'] == 'OK' and place['results']:
                geocode = gmaps_api.GMapsGeocode(place['results'][0])
                southwest, northeast = math.expand_bounds(
                    geocode.latlng_bounds(), form.distance_in_km())
                city_name = place['results'][0]['formatted_address']
                # This will fail on a bad location, so let's verify the location is geocodable above first.
            else:
                if self.version == (1, 0):
                    self.write_json_success({'results': []})
                    return
                else:
                    self.add_error('Could not geocode location')

        # NOTE(review): presumably aborts the request when errors were added
        # above (which also guards the possibly-unbound city_name/southwest/
        # northeast in the geocode-failure path) — confirm.
        self.errors_are_fatal()

        search_results = []
        # Progressively wider radii, in miles; stop at the first
        # non-empty result set.
        distances = [50, 100, 170, 300]
        distance_index = 0
        while not search_results:
            form.distance.data = distances[distance_index]
            form.distance_units.data = 'miles'
            search_query = form.build_query()
            searcher = search.Search(search_query)
            # TODO(lambert): Increase the size limit when our clients can handle it. And improve our result sorting to return the 'best' results.
            searcher.limit = 500
            search_results = searcher.get_search_results(full_event=True)

            # Increase our search distance in the hopes of finding something
            distance_index += 1
            if distance_index == len(distances):
                # If we searched the world, then break
                break

        logging.info("Found %r events within %s %s of %s", form.keywords.data,
                     form.distance.data, form.distance_units.data,
                     form.location.data)
        onebox_links = onebox.get_links_for_query(search_query)

        json_results = []
        for result in search_results:
            try:
                json_result = canonicalize_event_data(result.db_event,
                                                      result.event_keywords)
                json_results.append(json_result)
            except Exception as e:
                # Best-effort: skip events that fail to canonicalize
                # rather than failing the whole response.
                logging.exception("Error processing event %s: %s" %
                                  (result.event_id, e))

        title = self._get_title(city_name, form.keywords.data)

        json_response = {
            'results': json_results,
            'onebox_links': onebox_links,
            'title': title,
            'location': city_name,
            'query': data,
        }
        if southwest and northeast:
            json_response['location_box'] = {
                'southwest': {
                    'latitude': southwest[0],
                    'longitude': southwest[1],
                },
                'northeast': {
                    'latitude': northeast[0],
                    'longitude': northeast[1],
                },
            }
        self.write_json_success(json_response)