Example #1
def plus_search(tags, search, search_types, order, extra_filter=None):
    items = get_resources_for_tag_intersection(tags)
    q = None
    for typ, info in search_types:
        if info[0]:
            typ_items = Q(**info[0])
            if info[1]:
                typ_items = typ_items & ~Q(**info[1])
        elif info[1]:
            typ_items = ~Q(**info[1])
        if not q:
            q = typ_items
        else:
            q = q | typ_items
    if extra_filter:
        q = q & Q(**extra_filter)
    if q:
        items = items.filter(q)

    results_map = {}
    tag_intersection = []
    if search:
        results = RelatedSearchQuerySet().auto_query(search)
        results_map = {}
        if results:
            all_results = results.load_all()
            all_results = all_results.load_all_queryset(GenericReference, items)
            results_map["All"] = [item.object for item in all_results]  # we really really shouldn't do this
        else:
            results_map = {"All": EmptySearchQuerySet()}
    else:
        if items:
            results_map["All"] = items.all()

    if "All" in results_map:
        tag_intersection = get_intersecting_tags(results_map["All"], n=15)
        if len(search_types) > 1:
            for typ, info in search_types:
                if info[0]:
                    typ_items = items.filter(**info[0])
                    if info[1]:
                        typ_items = typ_items.exclude(**info[1])
                elif info[1]:
                    typ_items = items.exclude(**info[1])
                if search and results:
                    typ_items = all_results.load_all_queryset(GenericReference, typ_items)
                    typ_items = [item.object for item in typ_items]  # we really really shouldn't do this
                    results_map[typ] = typ_items
    else:
        results_map = {"All": EmptySearchQuerySet()}

    search_types = [
        (typ, data[2], results_map[typ], len(results_map[typ]))
        for typ, data in search_types
        if typ in results_map
    ]
    return (results_map["All"], search_types, tag_intersection)
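The signature above takes a search_types argument whose shape is only implied by the code: each entry looks like (type_name, info), where info[0] is a dict of filter kwargs, info[1] a dict of exclude kwargs, and info[2] a display label that ends up in the returned tuples. A minimal sketch of a call site follows; the module path, field names, and labels are illustrative assumptions, not taken from the source.

# Hypothetical call site -- the import path, field names and labels below are
# illustrative assumptions; only the (typ, (filter_kwargs, exclude_kwargs, label))
# shape is inferred from the code above.
from myapp.search import plus_search  # assumed module path

search_types = [
    ("WikiPage", ({"content_type__model": "wikipage"}, {}, "Wiki pages")),
    ("Resource", ({"content_type__model": "resource"}, {}, "Resources")),
]

all_results, per_type, tag_intersection = plus_search(
    tags=["django"],
    search="deployment notes",
    search_types=search_types,
    order=None,
)
for type_name, label, results, count in per_type:
    print(label, count)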
Example #2
def plus_search(tags,
                search,
                search_types,
                order=None,
                in_group=None,
                extra_filter=None):
    items = get_resources_for_tag_intersection(tags)
    q = None
    for typ, info in search_types:
        if info[0]:
            typ_items = Q(**info[0])
            if info[1]:
                typ_items = typ_items & ~Q(**info[1])
        elif info[1]:
            typ_items = ~Q(**info[1])
        if not q:
            q = typ_items
        else:
            q = q | typ_items

    if extra_filter:
        q = q & Q(**extra_filter)

    if q:
        items = items.filter(q)

    included = None
    for obj_class, included_filter in object_type_filters.items():
        objs = obj_class.objects.filter(**included_filter)
        included_search = {
            'content_type__model': obj_class.__name__.lower(),
            'object_id__in': objs
        }

        if included is None:
            included = Q(**included_search)
        else:
            included = included | Q(**included_search)

    items = items.filter(included)

    if in_group:
        # this should be implemented using the code just above and an external search filter arg
        page_ids = WikiPage.objects.filter(in_agent=in_group, stub=False)
        wikipages = Q(**{
            'content_type__model': 'wikipage',
            'object_id__in': page_ids
        })
        resource_ids = Resource.objects.filter(in_agent=in_group, stub=False)
        resources = Q(**{
            'content_type__model': 'resource',
            'object_id__in': resource_ids
        })

        q = resources | wikipages
        items = items.filter(q)

    results_map = {}
    tag_intersection = []
    if search:
        results = RelatedSearchQuerySet().auto_query(search)
        results_map = {}
        if results:
            all_results = results.load_all_queryset(GenericReference, items)
            if order == 'relevance':
                # this bit is quite evil and makes things really inefficient for large searches;
                # a better approach would be to get all the ids directly from the fulltext index
                # and use them as a filter for GenericReferences
                all_results = all_results.load_all()
                results_map['All'] = [item.object for item in all_results]  # we really really shouldn't do this
                items_len = len(results_map['All'])
            else:
                search_results = [result.pk for result in all_results]
                results_map['All'] = items.filter(
                    id__in=search_results).order_by(order)
                items_len = results_map['All'].count()
        else:
            results_map = {'All': EmptySearchQuerySet()}
            items_len = 0
    else:
        if items:
            items = items.order_by('creator')
            items_len = items.count()
            results_map['All'] = items
        else:
            items_len = 0
            results_map = {'All': EmptySearchQuerySet()}

    if order == 'modified':
        results_map['All'] = results_map['All'].order_by('-' + order)
    elif order == 'display_name':
        results_map['All'] = results_map['All'].order_by(order)

    if 'All' in results_map:
        tag_intersection = get_intersecting_tags(results_map['All'], n=15)

        if len(search_types) > 1:
            for typ, info in search_types:
                if info[0]:
                    typ_items = items.filter(**info[0])
                    if info[1]:
                        typ_items = typ_items.exclude(**info[1])
                elif info[1]:
                    typ_items = items.exclude(**info[1])
                if search and results and order == 'relevance':
                    # why do this again when we could just separate results using python
                    typ_items = all_results.load_all_queryset(GenericReference, typ_items)
                    typ_items = [item.object for item in typ_items]  # we really really shouldn't do this
                if typ_items:
                    results_map[typ] = typ_items
    else:
        results_map = {'All': EmptySearchQuerySet()}

    search_type_data = []
    for typ, data in search_types:
        if typ in results_map:
            try:
                type_len = results_map[typ].count()
            except TypeError:
                type_len = len(results_map[typ])

            search_type_data.append((typ, data[2], results_map[typ], type_len))

    return {
        'All': results_map['All'],
        'items_len': items_len,
        'search_types': search_type_data,
        'tag_intersection': tag_intersection
    }
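Both the relevance branch above and the per-type loop lean on Haystack's RelatedSearchQuerySet, which can re-join full-text hits against an arbitrary Django queryset per model via load_all_queryset(). Below is a minimal sketch of that pattern in isolation; it assumes a Haystack-indexed GenericReference model and a pre-filtered items queryset, as in the examples.

# Minimal sketch of the Haystack pattern used above, under the assumption that
# GenericReference is indexed by Haystack and `items` is a pre-filtered
# GenericReference queryset as in the examples.
from haystack.query import RelatedSearchQuerySet

def search_within(items, query):
    # Full-text query restricted to the rows already selected in `items`.
    results = RelatedSearchQuerySet().auto_query(query)
    results = results.load_all_queryset(GenericReference, items)
    # load_all() fetches the database objects behind each hit in bulk.
    return [hit.object for hit in results.load_all()]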
Example #3
def plus_search(tags, search, search_types, order=None, in_group=None, extra_filter=None):
    items = get_resources_for_tag_intersection(tags)
    q = None
    for typ, info in search_types:
        if info[0]:
            typ_items = Q(**info[0])
            if info[1]:
                typ_items = typ_items & ~Q(**info[1])
        elif info[1]:
            typ_items = ~Q(**info[1])
        if not q:
            q = typ_items
        else:
            q = q | typ_items

    if extra_filter:
        q = q & Q(**extra_filter)
        
    if q:
        items = items.filter(q)

    included = None
    for obj_class, included_filter in object_type_filters.items():
        objs = obj_class.objects.filter(**included_filter)
        included_search = {'content_type__model':obj_class.__name__.lower(),
                           'object_id__in':objs}
        
        if included is None:
            included = Q(**included_search)
        else:
            included = included | Q(**included_search)

    items = items.filter(included)

    if in_group:
        # this should be implemented using the code just above and an external search filter arg
        page_ids = WikiPage.objects.filter(in_agent=in_group, stub=False)
        wikipages = Q(**{'content_type__model':'wikipage',
                         'object_id__in':page_ids})
        resource_ids = Resource.objects.filter(in_agent=in_group, stub=False)
        resources = Q(**{'content_type__model':'resource',
                         'object_id__in':resource_ids})

        q = resources | wikipages
        items = items.filter(q)
 
    results_map = {}
    tag_intersection = []
    if search:
        results = RelatedSearchQuerySet().auto_query(search)
        results_map = {}
        if results:
            all_results = results.load_all_queryset(GenericReference, items)
            if order == 'relevance':
                # this bit is quite evil and makes things really inefficient for large searches;
                # a better approach would be to get all the ids directly from the fulltext index
                # and use them as a filter for GenericReferences
                all_results = all_results.load_all()
                results_map['All'] = [item.object for item in all_results]  # we really really shouldn't do this
                items_len = len(results_map['All'])
            else:
                search_results = [result.pk for result in all_results]
                results_map['All'] = items.filter(id__in=search_results).order_by(order)
                items_len = results_map['All'].count()
        else:
            results_map = {'All':EmptySearchQuerySet()}
            items_len = 0
    else:
        if items:
            items = items.order_by('creator')
            items_len = items.count()
            results_map['All'] = items
        else:
            items_len = 0
            results_map = {'All':EmptySearchQuerySet()}            

    if order == 'modified':
        results_map['All'] = results_map['All'].order_by('-' + order)
    elif order == 'display_name':
        results_map['All'] = results_map['All'].order_by(order)        

    if 'All' in results_map:
        tag_intersection = get_intersecting_tags(results_map['All'], n=15)

        if len(search_types) > 1:
            for typ, info in search_types:
                if info[0]:
                    typ_items = items.filter(**info[0])
                    if info[1]:
                        typ_items = typ_items.exclude(**info[1])
                elif info[1]:
                    typ_items = items.exclude(**info[1])
                if search and results and order == 'relevance':
                    # why do this again when we could just separate results using python
                    typ_items = all_results.load_all_queryset(GenericReference, typ_items)
                    typ_items = [item.object for item in typ_items]  # we really really shouldn't do this
                if typ_items:
                    results_map[typ] = typ_items
    else:
        results_map = {'All':EmptySearchQuerySet()}

    search_type_data = []
    for typ, data in search_types:
        if typ in results_map:
            try:
                type_len = results_map[typ].count()
            except TypeError:
                type_len = len(results_map[typ])
            
            search_type_data.append((typ, data[2], results_map[typ], type_len))

    return {'All':results_map['All'], 'items_len':items_len, 'search_types':search_type_data, 'tag_intersection':tag_intersection}
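All three versions build the initial filter the same way: for each search type the include kwargs are ANDed with the negated exclude kwargs, and the per-type pieces are ORed together before being applied to the queryset. The stand-alone sketch below shows just that Q-composition step; the field names in it are illustrative, not from the source.

# Stand-alone sketch of the include/exclude Q composition used at the top of
# each example. Field names are illustrative only.
from django.db.models import Q

search_types = [
    ("WikiPage", ({"content_type__model": "wikipage"}, {"stub": True}, "Wiki pages")),
    ("Resource", ({"content_type__model": "resource"}, {}, "Resources")),
]

q = None
for typ, info in search_types:
    piece = None
    if info[0]:
        piece = Q(**info[0])
        if info[1]:
            piece = piece & ~Q(**info[1])
    elif info[1]:
        piece = ~Q(**info[1])
    if piece is not None:
        q = piece if q is None else q | piece

# `q` can now be applied to any queryset, e.g. items.filter(q).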