def test_copy_and_mutate(self):
     schema = mock.Mock()
     chain = FilterChain(schema=schema)
     chain.lookup_descriptions.append(1)
     chain.base_url = 'http://xyz'
     chain['foo'] = 'bar'
     chain['qux'] = 'whee'
     clone = chain.copy()
     # Attributes are copied...
     self.assertEqual(clone.lookup_descriptions, [1])
     self.assertEqual(clone.base_url, chain.base_url)
     self.assertEqual(clone.schema, chain.schema)
     self.assertEqual(clone.schema, schema)
     # ... and mutating them doesn't affect the original.
     clone.lookup_descriptions.pop()
     self.assertEqual(chain.lookup_descriptions, [1])
     # Likewise, items are copied, and mutating one doesn't affect the other.
     self.assertEqual(clone['foo'], 'bar')
     del chain['foo']
     self.assertEqual(clone['foo'], 'bar')
     del clone['qux']
     self.assertEqual(chain['qux'], 'whee')
     # Likewise, clearing.
     clone.clear()
     self.assertEqual(clone.items(), [])
     self.assertEqual(chain['qux'], 'whee')
Example 2
def get_featured_lookups_by_schema(context):
    """
    Get all :ref:`featured_lookups` names and URLs for them, and put
    them in the context as 'featured_lookups', a mapping grouped by
    schema slug.

    Example:

    .. code-block:: html+django

        {% get_featured_lookups_by_schema %}
        {% for schema, lookups in featured_lookups.items %}
           <ul>{{ schema }}
            {% for info in lookups %}
              <a href="{{ info.url }}">{{ info.lookup }}</a>
               ...
            {% endfor %}
           </ul>
        {% endfor %}
    """
    lookups = {}
    for lookup in Lookup.objects.filter(featured=True).select_related():
        sf = lookup.schema_field
        schema = sf.schema
        filters = FilterChain(schema=schema)
        filters.add(sf, lookup)
        info = {'lookup': lookup.name, 'url': filters.make_url()}
        lookups.setdefault(schema.slug, []).append(info)
    context['featured_lookups'] = lookups
    return u''
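
This tag reads and writes the template context and returns an empty string, which matches Django's simple_tag(takes_context=True) pattern. A minimal registration sketch (the module placement and decorator form are assumptions, not taken from the project):

from django import template

register = template.Library()

@register.simple_tag(takes_context=True)
def get_featured_lookups_by_schema(context):
    # ...body as above: fills context['featured_lookups'] and returns u''
    return u''
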
Example 3
 def test_copy_and_mutate(self):
     schema = mock.Mock()
     chain = FilterChain(schema=schema)
     chain.lookup_descriptions.append(1)
     chain.base_url = 'http://xyz'
     chain['foo'] = 'bar'
     chain['qux'] = 'whee'
     clone = chain.copy()
     # Attributes are copied...
     self.assertEqual(clone.lookup_descriptions, [1])
     self.assertEqual(clone.base_url, chain.base_url)
     self.assertEqual(clone.schema, chain.schema)
     self.assertEqual(clone.schema, schema)
     # ... and mutating them doesn't affect the original.
     clone.lookup_descriptions.pop()
     self.assertEqual(chain.lookup_descriptions, [1])
     # Likewise, items are copied, and mutating one doesn't affect the other.
     self.assertEqual(clone['foo'], 'bar')
     del chain['foo']
     self.assertEqual(clone['foo'], 'bar')
     del clone['qux']
     self.assertEqual(chain['qux'], 'whee')
     # Likewise, clearing.
     clone.clear()
     self.assertEqual(clone.items(), [])
     self.assertEqual(chain['qux'], 'whee')
    def test_sort__real_filters(self):
        req = mock.Mock()
        qs = mock.Mock()
        schema = mock.Mock()
        context = {'newsitem_qs': qs, 'schema': schema}
        from ebpub.db.schemafilters import TextSearchFilter, BoolFilter
        from ebpub.db.schemafilters import LookupFilter, LocationFilter
        from ebpub.db.schemafilters import DateFilter

        def mock_schemafield(name):
            # mock.Mock(name='foo') does something magic, but I just
            # want to set the name attribute.
            sf = mock.Mock()
            sf.name = name
            return sf

        all_filters = [
            TextSearchFilter(req, context, qs, 'hi',
                             schemafield=mock_schemafield(name='mock text sf')),
            BoolFilter(req, context, qs, 'yes',
                       schemafield=mock_schemafield(name='mock bool sf')),
            LookupFilter(req, context, qs,
                         schemafield=mock_schemafield(name='mock lookup sf')),
            LocationFilter(req, context, qs, 'neighborhoods'),
            DateFilter(req, context, qs, '2011-04-11', '2011-04-12'),
            ]
        chain = FilterChain([(item.slug, item) for item in all_filters])
        ordered_chain = chain.copy()
        ordered_chain.sort()
        self.assertEqual(ordered_chain.keys(),
                         ['date', 'mock bool sf', 'location', 'mock lookup sf', 'mock text sf'])
Example 5
 def _make_chain(self, url):
     request = RequestFactory().get(url)
     crime = models.Schema.objects.get(slug='crime')
     context = {'schema': crime}
     chain = FilterChain(request=request, context=context, schema=crime)
     chain.update_from_request(filter_sf_dict={})
     return chain
Example 6
def schema_filter(request, slug, args_from_url):
    """
    List NewsItems for one schema, filtered by various criteria in the
    URL (date, location, or values of SchemaFields).
    """
    s = get_object_or_404(get_schema_manager(request), slug=slug, is_special_report=False)
    if not s.allow_charting:
        return HttpResponsePermanentRedirect(s.url())

    context = {
        'bodyclass': 'schema-filter',
        'bodyid': s.slug,
        'schema': s,
        }
    # Breadcrumbs. We can assign this early because it's a generator,
    # so it'll get the full context no matter what.
    context['breadcrumbs'] = breadcrumbs.schema_filter(context)

    filter_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_filter=True).order_by('display_order'))
    textsearch_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_searchable=True).order_by('display_order'))

    # Use SortedDict to preserve the display_order.
    filter_sf_dict = SortedDict([(sf.name, sf) for sf in filter_sf_list] + [(sf.name, sf) for sf in textsearch_sf_list])

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, context=context, schema=s)
    context['filters'] = filterchain
    try:
        filterchain.update_from_request(args_from_url, filter_sf_dict)
        filters_need_more = filterchain.validate()
    except FilterError, e:
        if getattr(e, 'url', None) is not None:
            return HttpResponseRedirect(e.url)
        raise Http404(str(e))
Example 7
def get_featured_lookups_by_schema(context):
    """
    Get all :ref:`featured_lookups` names and URLs for them, and put
    them in the context as 'featured_lookups', a mapping grouped by
    schema slug.

    Example:

    .. code-block:: html+django

        {% get_featured_lookups_by_schema %}
        {% for schema, lookups in featured_lookups.items %}
           <ul>{{ schema }}
            {% for info in lookups %}
              <a href="{{ info.url }}">{{ info.lookup }}</a>
               ...
            {% endfor %}
           </ul>
        {% endfor %}
    """
    lookups = {}
    for lookup in Lookup.objects.filter(featured=True).select_related():
        sf = lookup.schema_field
        schema = sf.schema
        filters = FilterChain(schema=schema)
        filters.add(sf, lookup)
        info = {"lookup": lookup.name, "url": filters.make_url()}
        lookups.setdefault(schema.slug, []).append(info)
    context["featured_lookups"] = lookups
    return u""
    def test_sort__real_filters(self):
        req = mock.Mock()
        qs = mock.Mock()
        schema = mock.Mock()
        context = {"newsitem_qs": qs, "schema": schema}
        from ebpub.db.schemafilters import TextSearchFilter, BoolFilter
        from ebpub.db.schemafilters import LookupFilter, LocationFilter
        from ebpub.db.schemafilters import DateFilter

        def mock_schemafield(name):
            # mock.Mock(name='foo') does something magic, but I just
            # want to set the name attribute.
            sf = mock.Mock()
            sf.name = name
            return sf

        all_filters = [
            TextSearchFilter(req, context, qs, "hi", schemafield=mock_schemafield(name="mock text sf")),
            BoolFilter(req, context, qs, "yes", schemafield=mock_schemafield(name="mock bool sf")),
            LookupFilter(req, context, qs, schemafield=mock_schemafield(name="mock lookup sf")),
            LocationFilter(req, context, qs, "neighborhoods"),
            DateFilter(req, context, qs, "2011-04-11", "2011-04-12"),
        ]
        chain = FilterChain([(item.slug, item) for item in all_filters])
        ordered_chain = chain.copy()
        ordered_chain.sort()
        self.assertEqual(ordered_chain.keys(), ["date", "mock bool sf", "location", "mock lookup sf", "mock text sf"])
 def _make_chain(self, url):
     request = RequestFactory().get(url)
     crime = models.Schema.objects.get(slug='crime')
     context = {'schema': crime}
     chain = FilterChain(request=request, context=context, schema=crime)
     chain.update_from_request(filter_sf_dict={})
     return chain
Example 10
 def _make_chain(self, url):
     request = RequestFactory().get(url)
     argstring = request.path.split('filter/', 1)[-1]
     crime = models.Schema.objects.get(slug='crime')
     context = {'schema': crime}
     chain = FilterChain(request=request, context=context, schema=crime)
     chain.update_from_request(argstring=argstring, filter_sf_dict={})
     return chain
Example 11
 def test_add__pubdate(self):
     # Key gets overridden.
     chain = FilterChain()
     import datetime
     chain.add('pubdate', datetime.date(2011, 8, 13))
     self.assert_(chain.has_key('date'))
     self.failIf(chain.has_key('pubdate'))
     self.assertEqual(chain['date'].start_date, datetime.date(2011, 8, 13))
Example 12
 def test_ordering(self):
     chain = FilterChain()
     args = range(10)
     random.shuffle(args)
     for i in args:
         chain[i] = i
     self.assertEqual(chain.items(), [(i, i) for i in args])
     self.assertEqual(chain.keys(), args)
Example 13
 def test_add_date__whole_month(self):
     # Special syntax for adding a whole month, convenient for templates
     # where you don't want to have to calculate the end date.
     chain = FilterChain()
     import datetime
     chain.add('date', datetime.date(2011, 8, 13), 'month')
     self.assertEqual(chain['date'].start_date, datetime.date(2011, 8, 1))
     self.assertEqual(chain['date'].end_date, datetime.date(2011, 8, 31))
Example 14
 def _make_chain(self, url):
     request = RequestFactory().get(url)
     argstring = request.path.split("filter/", 1)[-1]
     crime = models.Schema.objects.get(slug="crime")
     context = {"schema": crime}
     chain = FilterChain(request=request, context=context, schema=crime)
     chain.update_from_request(argstring=argstring, filter_sf_dict={})
     return chain
Example 15
 def test_ordering(self):
     chain = FilterChain()
     args = range(10)
     random.shuffle(args)
     for i in args:
         chain[i] = i
     self.assertEqual(chain.items(), [(i, i) for i in args])
     self.assertEqual(chain.keys(), args)
Example 16
 def test_add_date__whole_month(self):
     # Special syntax for adding a whole month, convenient for templates
     # where you don't want to have to calculate the end date.
     chain = FilterChain()
     import datetime
     chain.add('date', datetime.date(2011, 8, 13), 'month')
     self.assertEqual(chain['date'].start_date, datetime.date(2011, 8, 1))
     self.assertEqual(chain['date'].end_date, datetime.date(2011, 8, 31))
Example 17
    def test_filters_for_display(self):
        class Dummy(object):
            def __init__(self, label):
                self.label = label

        chain = FilterChain(
            [("foo", Dummy("yes")), ("bar", Dummy(None)), ("bat", Dummy("yes also")), ("baz", Dummy(None))]
        )
        self.assertEqual(len(chain.values()), 4)
        self.assertEqual(len(chain.filters_for_display()), 2)
        self.assert_(all([f.label for f in chain.filters_for_display()]))
Example 18
    def test_update_from_request__empty(self):
        request = mock.Mock()
        class StubQueryDict(dict):
            def getlist(self, key):
                return []
            def copy(self):
                return StubQueryDict(self.items())

        request.GET = StubQueryDict()
        chain = FilterChain(request=request)
        chain.update_from_request({})
        self.assertEqual(len(chain), 0)
Example 19
 def test_filters_for_display(self):
     class Dummy(object):
         def __init__(self, label):
             self.label = label
     chain = FilterChain([('foo', Dummy('yes')),
                          ('bar', Dummy(None)),
                          ('bat', Dummy('yes also')),
                          ('baz', Dummy(None)),
                          ])
     self.assertEqual(len(chain.values()), 4)
     self.assertEqual(len(chain.filters_for_display()), 2)
     self.assert_(all([f.label for f in chain.filters_for_display()]))
Example 20
 def test_add_by_place_id(self, mock_get_object_or_404):
     chain = FilterChain()
     from ebpub.streets.models import Block
     from ebpub.db.schemafilters import BlockFilter
     block = Block(city='city', street_slug='street_slug',
                   pretty_name='pretty_name',
                   street_pretty_name='street_pretty_name',
                   street='street',
                   from_num='123', to_num='456',
                   )
     mock_get_object_or_404.return_value = block
     chain.add_by_place_id('b:123.1')
     self.assert_(isinstance(chain['location'], BlockFilter))
Example 21
    def test_filters_for_display(self):
        class Dummy(object):
            def __init__(self, label):
                self.label = label

        chain = FilterChain([
            ('foo', Dummy('yes')),
            ('bar', Dummy(None)),
            ('bat', Dummy('yes also')),
            ('baz', Dummy(None)),
        ])
        self.assertEqual(len(chain.values()), 4)
        self.assertEqual(len(chain.filters_for_display()), 2)
        self.assert_(all([f.label for f in chain.filters_for_display()]))
Example 22
    def test_update_from_request__empty(self):
        request = mock.Mock()

        class StubQueryDict(dict):
            def getlist(self, key):
                return []

            def copy(self):
                return StubQueryDict(self.items())

        request.GET = StubQueryDict()
        chain = FilterChain(request=request)
        chain.update_from_request({})
        self.assertEqual(len(chain), 0)
Example 23
    def test_sort(self):
        class Dummy(object):
            def __init__(self, sort_value):
                self._sort_value = sort_value

        dummies = [Dummy(i) for i in range(10)]
        random.shuffle(dummies)
        chain = FilterChain()
        for i in range(10):
            chain[i] = dummies[i]

        self.assertNotEqual(range(10), [v._sort_value for v in chain.values()])

        normalized = chain.copy()
        normalized.sort()
        self.assertEqual(range(10), [v._sort_value for v in normalized.values()])
Example 24
 def test_add_by_place_id__bad(self):
     chain = FilterChain()
     from django.http import Http404
     self.assertRaises(Http404, chain.add_by_place_id, '')
     self.assertRaises(Http404, chain.add_by_place_id, 'blah')
     self.assertRaises(Http404, chain.add_by_place_id, 'b:123.1')
     self.assertRaises(Http404, chain.add_by_place_id, 'l:9999')
Example 25
 def test_add_by_place_id(self, mock_get_object_or_404):
     chain = FilterChain()
     from ebpub.streets.models import Block
     from ebpub.db.schemafilters import BlockFilter
     block = Block(
         city='city',
         street_slug='street_slug',
         pretty_name='pretty_name',
         street_pretty_name='street_pretty_name',
         street='street',
         from_num='123',
         to_num='456',
     )
     mock_get_object_or_404.return_value = block
     chain.add_by_place_id('b:123.1')
     self.assert_(isinstance(chain['location'], BlockFilter))
Example 26
 def _get_filterchain(self, context):
     filterchain_or_schema = self.filterchain_var.resolve(context)
     if isinstance(filterchain_or_schema, FilterChain):
         filterchain = filterchain_or_schema
     elif isinstance(filterchain_or_schema, Schema):
         # Note, context['request'] only works if
         # django.core.context_processors.request is enabled in
         # TEMPLATE_CONTEXT_PROCESSORS.
         filterchain = FilterChain(context=context, request=context['request'],
                                   schema=filterchain_or_schema)
     else:
         raise template.TemplateSyntaxError(
             "%r is neither a FilterChain nor a Schema" % filterchain_or_schema)
     if self.clear:
         filterchain = filterchain.copy()
         filterchain.clear()
     return filterchain
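
The comment above about context['request'] refers to Django's request context processor. A minimal settings sketch, assuming the pre-Django-1.8 TEMPLATE_CONTEXT_PROCESSORS setting this codebase uses (the surrounding entries are illustrative):

# settings.py
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.request',  # makes context['request'] available
)
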
Example 27
    def test_sort(self):
        class Dummy(object):
            def __init__(self, sort_value):
                self._sort_value = sort_value

        dummies = [Dummy(i) for i in range(10)]
        random.shuffle(dummies)
        chain = FilterChain()
        for i in range(10):
            chain[i] = dummies[i]

        self.assertNotEqual(range(10), [v._sort_value for v in chain.values()])

        normalized = chain.copy()
        normalized.sort()
        self.assertEqual(range(10),
                         [v._sort_value for v in normalized.values()])
Example 28
 def test_no_duplicates(self):
     from ebpub.db.schemafilters import DuplicateFilterError
     self.assertRaises(DuplicateFilterError, FilterChain,
                       (('foo', 'bar'), ('foo', 'bar2')))
     chain = FilterChain()
     chain['foo'] = 'bar'
     self.assertRaises(DuplicateFilterError, chain.__setitem__, 'foo',
                       'bar')
Example 29
 def _get_filterchain(self, context):
     filterchain_or_schema = self.filterchain_var.resolve(context)
     if isinstance(filterchain_or_schema, FilterChain):
         filterchain = filterchain_or_schema
     elif isinstance(filterchain_or_schema, Schema):
         # Note, context['request'] only works if
         # django.core.context_processors.request is enabled in
         # TEMPLATE_CONTEXT_PROCESSORS.
         filterchain = FilterChain(context=context, request=context.get('request'),
                                   schema=filterchain_or_schema)
     else:
         raise template.TemplateSyntaxError(
             "%r is neither a FilterChain nor a Schema" % filterchain_or_schema)
     if self.clear:
         filterchain = filterchain.copy()
         filterchain.clear()
     return filterchain
Example 30
    def test_add_by_place_id(self, mock_get_object_or_404):
        chain = FilterChain()
        from ebpub.streets.models import Block
        from ebpub.db.schemafilters import BlockFilter

        block = Block(
            city="city",
            street_slug="street_slug",
            pretty_name="pretty_name",
            street_pretty_name="street_pretty_name",
            street="street",
            from_num="123",
            to_num="456",
        )
        mock_get_object_or_404.return_value = block
        chain.add_by_place_id("b:123.1")
        self.assert_(isinstance(chain["location"], BlockFilter))
Example 31
def ajax_place_lookup_chart(request):
    """
    Returns HTML fragment -- expects request.GET['pid'] and request.GET['sf'] (a SchemaField ID).
    """
    try:
        sf = SchemaField.objects.select_related().get(id=int(request.GET['sf']), schema__is_public=True)
    except (KeyError, ValueError, SchemaField.DoesNotExist):
        raise Http404('Invalid SchemaField')
    filters = FilterChain(request=request, schema=sf.schema)
    filters.add_by_place_id(request.GET.get('pid', ''))
    qs = filters.apply()
    total_count = qs.count()
    top_values = qs.top_lookups(sf, 10)
    return render_to_response('db/snippets/lookup_chart.html', {
        'lookup': {'sf': sf, 'top_values': top_values},
        'total_count': total_count,
        'schema': sf.schema,
        'filters': filters,
    })
Example 32
    def test_sort__real_filters(self):
        req = mock.Mock()
        qs = mock.Mock()
        schema = mock.Mock()
        context = {'newsitem_qs': qs, 'schema': schema}
        from ebpub.db.schemafilters import TextSearchFilter, BoolFilter
        from ebpub.db.schemafilters import LookupFilter, LocationFilter
        from ebpub.db.schemafilters import DateFilter

        def mock_schemafield(name):
            # mock.Mock(name='foo') does something magic, but I just
            # want to set the name attribute.
            sf = mock.Mock()
            sf.name = name
            return sf

        all_filters = [
            TextSearchFilter(
                req,
                context,
                qs,
                'hi',
                schemafield=mock_schemafield(name='mock text sf')),
            BoolFilter(req,
                       context,
                       qs,
                       'yes',
                       schemafield=mock_schemafield(name='mock bool sf')),
            LookupFilter(req,
                         context,
                         qs,
                         schemafield=mock_schemafield(name='mock lookup sf')),
            LocationFilter(req, context, qs, 'neighborhoods'),
            DateFilter(req, context, qs, '2011-04-11', '2011-04-12'),
        ]
        chain = FilterChain([(item.slug, item) for item in all_filters])
        ordered_chain = chain.copy()
        ordered_chain.sort()
        self.assertEqual(ordered_chain.keys(), [
            'date', 'mock bool sf', 'location', 'mock lookup sf',
            'mock text sf'
        ])
Example 33
def bigmap_filter(request, slug):

    s = get_object_or_404(get_schema_manager(request), slug=slug, is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_dict = _get_filter_schemafields(s)

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(filter_sf_dict)
        filters_need_more = filterchain.validate()
    except:
        logger.exception("Unhandled error")
        return HttpResponse(status=404)

    config = _decode_map_permalink(request, show_default_layers=False, filters=filterchain)

    new_url = filterchain.make_url(base_url=reverse("bigmap_filter", args=(slug,)))
    if new_url != request.get_full_path():
        return HttpResponseRedirect(new_url)

    # add in the filter layer
    base_url = reverse("ebpub-schema-filter-geojson", args=(slug,))
    layer_url = filterchain.make_url(base_url=base_url)
    custom_layer = {"url": layer_url, "params": {}, "title": "Custom Filter", "visible": True}
    config["layers"].append(custom_layer)

    if config["is_widget"]:
        return eb_render(request, "richmaps/embed_bigmap.html", {"map_config": simplejson.dumps(config, indent=2)})
    else:
        return eb_render(request, "richmaps/bigmap.html", {"map_config": simplejson.dumps(config, indent=2)})
Example 34
 def render(self, context):
     filterchain = self.filterchain_var.resolve(context)
     if isinstance(filterchain, FilterChain):
         schema = filterchain.schema
     elif isinstance(filterchain, Schema):
         schema = filterchain
         # Note, context['request'] only works if
         # django.core.context_processors.request is enabled in
         # TEMPLATE_CONTEXT_PROCESSORS.
         filterchain = FilterChain(context=context,
                                   request=context['request'],
                                   schema=schema)
     else:
         raise template.TemplateSyntaxError(
             "%r is neither a FilterChain nor a Schema" % filterchain)
     removals = [r.resolve(context) for r in self.removals]
     if self.clear:
         filterchain = filterchain.copy()
         filterchain.clear()
     additions = []
     for key, values in self.additions:
         key = key.resolve(context)
         additions.append((key, [v.resolve(context) for v in values]))
     schema = filterchain.schema
     return filterchain.make_url(additions=additions, removals=removals)
Example 35
def bigmap(request):
    '''
    Big map with all Schemas enabled by default.
    '''
    filterchain = FilterChain(request=request)
    config = _decode_map_permalink(request, filters=filterchain)

    if config['is_widget']:
        return eb_render(request, 'richmaps/embed_bigmap.html',
                         {'map_config': simplejson.dumps(config, indent=2)})
    else:
        return eb_render(request, 'richmaps/bigmap.html',
                         {'map_config': simplejson.dumps(config, indent=2)})
Example 36
 def test_add__pubdate(self):
     # Key gets overridden.
     chain = FilterChain()
     import datetime
     chain.add('pubdate', datetime.date(2011, 8, 13))
     self.assert_(chain.has_key('date'))
     self.failIf(chain.has_key('pubdate'))
     self.assertEqual(chain['date'].start_date, datetime.date(2011, 8, 13))
Example 37
def bigmap_filter(request, slug, args_from_url):

    s = get_object_or_404(get_schema_manager(request),
                          slug=slug,
                          is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_list = list(
        SchemaField.objects.filter(schema__id=s.id,
                                   is_filter=True).order_by('display_order'))
    textsearch_sf_list = list(
        SchemaField.objects.filter(
            schema__id=s.id, is_searchable=True).order_by('display_order'))

    # Use SortedDict to preserve the display_order.
    filter_sf_dict = SortedDict([(sf.name, sf) for sf in filter_sf_list] +
                                [(sf.name, sf) for sf in textsearch_sf_list])

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(args_from_url, filter_sf_dict)
        filters_need_more = filterchain.validate()
    except:
        return HttpResponse(status=404)

    config = _decode_map_permalink(request,
                                   show_default_layers=False,
                                   filters=filterchain)

    new_url = filterchain.make_url(
        base_url=reverse('bigmap_filter', args=(slug, )))
    if new_url != request.get_full_path():
        return HttpResponseRedirect(new_url)

    # add in the filter layer
    base_url = reverse('ebpub-schema-filter-geojson', args=(slug, ))
    layer_url = filterchain.make_url(base_url=base_url)
    custom_layer = {
        'url': layer_url,
        'params': {},
        'title': "Custom Filter",
        'visible': True
    }
    config['layers'].append(custom_layer)

    if config['is_widget']:
        return eb_render(request, 'richmaps/embed_bigmap.html',
                         {'map_config': simplejson.dumps(config, indent=2)})
    else:
        return eb_render(request, 'richmaps/bigmap.html',
                         {'map_config': simplejson.dumps(config, indent=2)})
Example 38
def bigmap_filter(request, slug):
    """
    Big map with just one Schema (identified by ``slug``) enabled by
    default.
    """
    s = get_object_or_404(get_schema_manager(request), slug=slug, is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_dict = _get_filter_schemafields(s)

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(filter_sf_dict)
        filters_need_more = filterchain.validate()
    except:
        logger.exception("Unhandled error")
        return HttpResponse(status=404)

    config = _decode_map_permalink(request, show_default_layers=False, filters=filterchain)


    # TODO: This can leave in permalink params eg. 'i', even if there
    # is also 'ids', because it doesn't recognize those as being the
    # same.
    new_url = filterchain.make_url(base_url=reverse('bigmap_filter', args=(slug,)))
    if new_url != request.get_full_path():
        return HttpResponseRedirect(new_url)

    if config['is_widget']:
        return eb_render(request, 'richmaps/embed_bigmap.html', {
            'map_config': simplejson.dumps(config, indent=2)
        })
    else:
        return eb_render(request, 'richmaps/bigmap.html', {
            'map_config': simplejson.dumps(config, indent=2)
        })
Example 39
 def render(self, context):
     filterchain = self.filterchain_var.resolve(context)
     if isinstance(filterchain, FilterChain):
         schema = filterchain.schema
     elif isinstance(filterchain, Schema):
         schema = filterchain
         # Note, context['request'] only works if
         # django.core.context_processors.request is enabled in
         # TEMPLATE_CONTEXT_PROCESSORS.
         filterchain = FilterChain(context=context, request=context['request'],
                                   schema=schema)
     else:
         raise template.TemplateSyntaxError(
             "%r is neither a FilterChain nor a Schema" % filterchain)
     removals = [r.resolve(context) for r in self.removals]
     if self.clear:
         filterchain = filterchain.copy()
         filterchain.clear()
     additions = []
     for key, values in self.additions:
         key = key.resolve(context)
         additions.append((key, [v.resolve(context) for v in values]))
     schema = filterchain.schema
     return filterchain.make_url(additions=additions, removals=removals)
Example 40
 def value_list(self):
     """
     Returns a list of {value, url, description} dictionaries
     representing each value for this attribute.
     """
     from django.utils.dateformat import format, time_format
     # Setting these to [None] ensures that zip() returns a list
     # of at least length one.
     urls = [None]
     descriptions = [None]
     if self.is_filter:
         from ebpub.db.schemafilters import FilterChain
         chain = FilterChain(schema=self.sf.schema)
         if self.is_lookup:
             urls = [chain.replace(self.sf, look).make_url() if look else None
                     for look in self.values]
         else:
             urls = [chain.replace(self.sf, self.raw_value).make_url()]
     if self.is_lookup:
         values = [val and val.name or 'None' for val in self.values]
         descriptions = [val and val.description or None for val in self.values]
     elif isinstance(self.raw_value, datetime.datetime):
         values = [format(self.raw_value, 'F j, Y, P')]
     elif isinstance(self.raw_value, datetime.date):
         values = [format(self.raw_value, 'F j, Y')]
     elif isinstance(self.raw_value, datetime.time):
         values = [time_format(self.raw_value, 'P')]
     elif self.raw_value is True:
         values = ['Yes']
     elif self.raw_value is False:
         values = ['No']
     elif self.raw_value is None:
         values = ['N/A']
     else:
         values = [self.raw_value]
     return [{'value': value, 'url': url, 'description': description} for value, url, description in zip(values, urls, descriptions)]
Example 41
def ajax_place_date_chart(request):
    """
    Returns HTML fragment containing a chart of how many news items
    were added for each day over a short period (length defined by
    constants.DAYS_SHORT_AGGREGATE_TIMEDELTA).

    Expects request.GET['pid'] and request.GET['s'] (a Schema ID).
    """
    manager = get_schema_manager(request)
    try:
        schema = manager.get(id=int(request.GET['s']))
    except (KeyError, ValueError, Schema.DoesNotExist):
        raise Http404('Invalid Schema')
    filters = FilterChain(request=request, schema=schema)
    filters.add_by_place_id(request.GET.get('pid', ''))
    qs = filters.apply()

    # These charts are used on eg. the place overview page; there,
    # they should be smaller than the ones on the schema_detail view;
    # we don't have room for a full 30 days.
    date_span = constants.DAYS_SHORT_AGGREGATE_TIMEDELTA
    if schema.is_event:
        # Soonest span that includes some.
        try:
            qs = qs.filter(item_date__gte=today()).order_by('item_date', 'id')
            first_item = qs.values('item_date')[0]
            start_date = first_item['item_date']
        except IndexError:  # No matching items.
            start_date = today()
        end_date = today() + date_span
    else:
        # Most recent span that includes some.
        try:
            qs = qs.filter(item_date__lte=today()).order_by('-item_date', '-id')
            last_item = qs.values('item_date')[0]
            end_date = last_item['item_date']
        except IndexError:  # No matching items.
            end_date = today()
        start_date = end_date - date_span

    filters.add('date', start_date, end_date)
    counts = filters.apply().date_counts()
    date_chart = get_date_chart([schema], start_date, end_date, {schema.id: counts})[0]
    return render_to_response('db/snippets/date_chart.html', {
        'schema': schema,
        'date_chart': date_chart,
        'filters': filters,
    })
Example 42
def bigmap_filter(request, slug, args_from_url):
    
    s = get_object_or_404(get_schema_manager(request), slug=slug, is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_filter=True).order_by('display_order'))
    textsearch_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_searchable=True).order_by('display_order'))

    # Use SortedDict to preserve the display_order.
    filter_sf_dict = SortedDict([(sf.name, sf) for sf in filter_sf_list] + [(sf.name, sf) for sf in textsearch_sf_list])

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(args_from_url, filter_sf_dict)
        filters_need_more = filterchain.validate()
    except:
        return HttpResponse(status=404)
        
    config = _decode_map_permalink(request, show_default_layers=False, filters=filterchain)

    new_url = filterchain.make_url(base_url=reverse('bigmap_filter', args=(slug,)))
    if new_url != request.get_full_path():
        return HttpResponseRedirect(new_url)    

    
    # add in the filter layer
    base_url = reverse('ebpub-schema-filter-geojson', args=(slug,))
    layer_url = filterchain.make_url(base_url=base_url)
    custom_layer = {
        'url': layer_url,
        'params': {},
        'title': "Custom Filter",
        'visible': True
    }
    config['layers'].append(custom_layer)



    if config['is_widget']: 
        return eb_render(request, 'richmaps/embed_bigmap.html', {
            'map_config': simplejson.dumps(config, indent=2)
        })
    else:         
        return eb_render(request, 'richmaps/bigmap.html', {
            'map_config': simplejson.dumps(config, indent=2)
        })
Example 43
def bigmap_filter(request, slug):
    """
    Big map with just one Schema (identified by ``slug``) enabled by
    default.
    """
    s = get_object_or_404(get_schema_manager(request),
                          slug=slug,
                          is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_dict = _get_filter_schemafields(s)

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(filter_sf_dict)
        filters_need_more = filterchain.validate()
    except:
        logger.exception("Unhandled error")
        return HttpResponse(status=404)

    config = _decode_map_permalink(request,
                                   show_default_layers=False,
                                   filters=filterchain)

    # TODO: This can leave in permalink params eg. 'i', even if there
    # is also 'ids', because it doesn't recognize those as being the
    # same.
    new_url = filterchain.make_url(
        base_url=reverse('bigmap_filter', args=(slug, )))
    if new_url != request.get_full_path():
        return HttpResponseRedirect(new_url)

    if config['is_widget']:
        return eb_render(request, 'richmaps/embed_bigmap.html',
                         {'map_config': simplejson.dumps(config, indent=2)})
    else:
        return eb_render(request, 'richmaps/bigmap.html',
                         {'map_config': simplejson.dumps(config, indent=2)})
Example 44
 def test_empty(self):
     chain = FilterChain()
     self.assertEqual(chain.items(), [])
Example 45
def place_detail_timeline(request, *args, **kwargs):
    """
    Recent news OR upcoming events for the given Location or Block.
    """
    context, response = _place_detail_normalize_url(request, *args, **kwargs)
    if response is not None:
        return response

    show_upcoming = kwargs.get('show_upcoming')
    schema_manager = get_schema_manager(request)

    if show_upcoming:
        context['breadcrumbs'] = breadcrumbs.place_detail_upcoming(context)
    else:
        context['breadcrumbs'] = breadcrumbs.place_detail_timeline(context)

    is_latest_page = True
    # Check the query string for the max date to use. Otherwise, fall
    # back to today.
    end_date = today()
    if 'start' in request.GET:
        try:
            end_date = parse_date(request.GET['start'], '%m/%d/%Y')
            is_latest_page = False
        except ValueError:
            raise Http404('Invalid date %s' % request.GET['start'])

    filterchain = FilterChain(request=request, context=context)
    filterchain.add('location', context['place'])
    # As an optimization, limit the NewsItems to those on the
    # last (or next) few days.
    # And only fetch for relevant schemas - either event-ish or not.
    if show_upcoming:
        s_list = schema_manager.filter(is_event=True)
        start_date = end_date
        end_date = start_date + datetime.timedelta(days=settings.DEFAULT_DAYS)
        order_by = 'item_date_date'
    else:
        s_list = schema_manager.filter(is_event=False)
        start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
        order_by = '-item_date_date'

    filterchain.add('schema', list(s_list))
    filterchain.add('date', start_date, end_date)
    newsitem_qs = filterchain.apply().select_related()
    # TODO: can this really only be done via extra()?
    newsitem_qs = newsitem_qs.extra(
        select={'item_date_date': 'date(db_newsitem.item_date)'},
        order_by=(order_by, '-schema__importance', 'schema')
    )[:constants.NUM_NEWS_ITEMS_PLACE_DETAIL]

    # We're done filtering, so go ahead and do the query, to
    # avoid running it multiple times,
    # per http://docs.djangoproject.com/en/dev/topics/db/optimization
    ni_list = list(newsitem_qs)
    schemas_used = list(set([ni.schema for ni in ni_list]))
    s_list = s_list.filter(is_special_report=False, allow_charting=True).order_by('plural_name')
    populate_attributes_if_needed(ni_list, schemas_used)
    if ni_list:
        next_day = ni_list[-1].item_date - datetime.timedelta(days=1)
    else:
        next_day = None

    hidden_schema_list = []
    if not request.user.is_anonymous():
        hidden_schema_list = [o.schema for o in HiddenSchema.objects.filter(user_id=request.user.id)]

    context.update({
        'newsitem_list': ni_list,
        'next_day': next_day,
        'is_latest_page': is_latest_page,
        'hidden_schema_list': hidden_schema_list,
        'bodyclass': 'place-detail-timeline',
        'bodyid': context.get('place_type') or '',
        'filters': filterchain,
        'show_upcoming': show_upcoming,
    })


    context['filtered_schema_list'] = s_list
    context['map_configuration'] = _preconfigured_map(context)
    response = eb_render(request, 'db/place_detail.html', context)
    for k, v in context['cookies_to_set'].items():
        response.set_cookie(k, v)
    return response
Example 46
def schema_filter_geojson(request, slug, args_from_url):
    s = get_object_or_404(get_schema_manager(request), slug=slug, is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_filter=True).order_by('display_order'))
    textsearch_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_searchable=True).order_by('display_order'))

    # Use SortedDict to preserve the display_order.
    filter_sf_dict = SortedDict([(sf.name, sf) for sf in filter_sf_list] + [(sf.name, sf) for sf in textsearch_sf_list])

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(args_from_url, filter_sf_dict)
        filters_need_more = filterchain.validate()
    except FilterError:
        return HttpResponse(status=400)
    except BadAddressException:
        return HttpResponse(status=400)
    except BadDateException:
        return HttpResponse(status=400)

    if filters_need_more:
        return HttpResponse(status=400)


    # If there isn't a date filter, add some dates to the queryset,
    # but NOT to the filterchain, because we need to give the user
    # the option of choosing dates.
    qs, start_date, end_date = _default_date_filtering(filterchain)

    if s.is_event:
        qs = qs.order_by('item_date', 'id')
    else:
        qs = qs.order_by('-item_date', '-id')

    page = request.GET.get('page', None)
    if page is not None:
        try:
            page = int(page)
            idx_start = (page - 1) * constants.FILTER_PER_PAGE
            idx_end = page * constants.FILTER_PER_PAGE
            # Get one extra, so we can tell whether there's a next page.
            idx_end += 1
        except ValueError:
            return HttpResponse('Invalid Page', status=400)
    else:
        idx_start, idx_end = 0, 1000
    qs = qs[idx_start:idx_end]

    cache_key = 'schema_filter_geojson:' + _make_cache_key_from_queryset(qs)
    cache_seconds = 60 * 5
    output = cache.get(cache_key, None)
    if output is None:
        output = api_items_geojson(list(qs))
        cache.set(cache_key, output, cache_seconds)

    response = HttpResponse(output, mimetype="application/javascript")
    patch_response_headers(response, cache_timeout=60 * 5)
    return response
Example 47
def newsitems_geojson(request):
    """Get a list of newsitems, optionally filtered for one place ID
    and/or one schema slug.

    Response is a geojson string.
    """
    # Note: can't use @cache_page here because that ignores all requests
    # with query parameters (in FetchFromCacheMiddleware.process_request).
    # So, we'll use the low-level cache API.

    # Copy-pasted code from ajax_place_newsitems.  Refactoring target:
    # Seems like there are a number of similar code blocks in
    # ebpub.db.views?

    pid = request.GET.get('pid', '')
    schema = request.GET.get('schema', None)
    if schema is not None:
        schema = get_object_or_404(Schema, slug=schema)

    nid = request.GET.get('newsitem', '')

    newsitem_qs = NewsItem.objects.all()
    if nid:
        newsitem_qs = newsitem_qs.filter(id=nid)
    else:
        filters = FilterChain(request=request, queryset=newsitem_qs, schema=schema)
        if pid:
            filters.add_by_place_id(pid)
        else:
            # Whole city!
            pass

        # More copy/paste from ebpub.db.views...
        # As an optimization, limit the NewsItems to those published in the
        # last few days.
        filters.update_from_query_params(request)
        if not filters.has_key('date'):
            end_date = today()
            start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
            filters.add('date', start_date, end_date)
        newsitem_qs = filters.apply()
        if not has_staff_cookie(request):
            newsitem_qs = newsitem_qs.filter(schema__is_public=True)

        # Put a hard limit on the number of newsitems, and throw away
        # older items.
        newsitem_qs = newsitem_qs.select_related().order_by('-item_date', '-id')
        newsitem_qs = newsitem_qs[:constants.NUM_NEWS_ITEMS_PLACE_DETAIL]

    # Done preparing the query; cache based on the raw SQL
    # to be sure we capture everything that matters.
    cache_seconds = 60 * 5
    cache_key = 'newsitem_geojson:' + _make_cache_key_from_queryset(newsitem_qs)
    output = cache.get(cache_key, None)
    if output is None:
        newsitem_list = list(newsitem_qs)
        output = api_items_geojson(newsitem_list)
        cache.set(cache_key, output, cache_seconds)

    response = HttpResponse(output, mimetype="application/javascript")
    patch_response_headers(response, cache_timeout=60 * 5)
    return response
Example 48
 def test_add__bogus_keyword_arg(self):
     chain = FilterChain()
     self.assertRaises(TypeError, chain.add, 'date', '2011-01-1', foo='bar')
Example 49
def place_detail_overview(request, *args, **kwargs):
    context, response = _place_detail_normalize_url(request, *args, **kwargs)
    if response is not None:
        return response
    schema_manager = get_schema_manager(request)
    context['breadcrumbs'] = breadcrumbs.place_detail_overview(context)

    schema_list = SortedDict([(s.id, s) for s in schema_manager.filter(is_special_report=False).order_by('plural_name')])
    # needed = set(schema_list.keys())

    # We actually want two lists of schemas, since we care whether
    # they are news-like or future-event-like.
    import copy
    eventish_schema_list = copy.deepcopy(schema_list)
    newsish_schema_list = copy.deepcopy(schema_list)
    for s_id, schema in schema_list.items():
        if schema.is_event:
            del(newsish_schema_list[s_id])
        else:
            del(eventish_schema_list[s_id])

    filterchain = FilterChain(request=request, context=context)
    filterchain.add('location', context['place'])

    # Distinguish between past news and upcoming events.
    # With some preliminary date limiting too.
    filterchain_news = filterchain.copy()
    filterchain_news.add('date',
                         today() - datetime.timedelta(days=90),
                         today())

    filterchain_events = filterchain.copy()
    filterchain_events.add('date',
                           today(),
                           today() + datetime.timedelta(days=60))

    # Ordering by ID ensures consistency across page views.
    newsitem_qs = filterchain_news.apply().order_by('-item_date', '-id')
    events_qs = filterchain_events.apply().order_by('item_date', 'id')

    # Mapping of schema id -> [schemafields], for building Lookup charts.
    sf_dict = {}
    charted_lookups = SchemaField.objects.filter(
        is_lookup=True, is_charted=True, schema__is_public=True,
        schema__is_special_report=False)
    charted_lookups = charted_lookups.values('id', 'schema_id', 'pretty_name')
    for sf in charted_lookups.order_by('schema__id', 'display_order'):
        sf_dict.setdefault(sf['schema_id'], []).append(sf)

    # Now retrieve newsitems per schema.
    schema_groups, all_newsitems = [], []
    for schema in schema_list.values():
        if schema.id in newsish_schema_list:
            newsitems = newsitem_qs.filter(schema__id=schema.id)
        elif schema.id in eventish_schema_list:
            newsitems = events_qs.filter(schema__id=schema.id)
        else:
            raise RuntimeError("should never get here")
        newsitems = list(newsitems[:schema.number_in_overview])
        populate_schema(newsitems, schema)
        schema_groups.append({
            'schema': schema,
            'latest_newsitems': newsitems,
            'has_newsitems': bool(newsitems),
            'lookup_charts': sf_dict.get(schema.id),
        })
        all_newsitems.extend(newsitems)
    schema_list = schema_list.values()
    populate_attributes_if_needed(all_newsitems, schema_list)
    schema_list = [s for s in schema_list if s.allow_charting]

    context['schema_groups'] = schema_groups
    context['filtered_schema_list'] = schema_list
    context['bodyclass'] = 'place-detail-overview'
    if context['is_block']:
        context['bodyid'] = '%s-%s-%s' % (context['place'].street_slug,
                                          context['place'].number(),
                                          context['place'].dir_url_bit())
    else:
        context['bodyid'] = context['location'].slug
    response = eb_render(request, 'db/place_overview.html', context)
    for k, v in context['cookies_to_set'].items():
        response.set_cookie(k, v)
    return response
Example 50
 def test_add__no_value(self):
     chain = FilterChain()
     self.assertRaises(FilterError, chain.add, 'date')
Example 51
 def test_add__id(self):
     from ebpub.db.schemafilters import IdFilter
     chain = FilterChain()
     chain.add('id', [1, 2, 3])
     self.assert_(isinstance(chain['id'], IdFilter))
Example 52
def _decode_map_permalink(request, show_default_layers=True, filters=None):
    """
    Permalinks for the big map, with more compact query parameters.

    Returns a map_config dictionary.

    Accepted parameters:

    c - map center, separated by underscore, eg. c=-92.28283_38.95658

    z - map zoom, eg. z=12

    l - layers to display on load, comma- or dash-separated,
        eg. l=p13,t32,p1 or eg. l=p12345-t7-t9,
        where p => place layer
        and t => schema ("type") layer

    i - items to load specifically by id, comma- or dash-separated,
        eg. i=t1234-t456

    p - popup center, with underscore, eg. p=-92.3438_38.9658
    f - popup feature, eg. f=t1234 or f=p1234
        where p = a place and t = a news item

    start_date - start date (inclusive) %m/%d/%Y
    end_date - end date (inclusive) %m/%d/%Y
    d - duration in days (overridden by end date), eg. d=7

    x - show as 'widget', just the map and nothing around it.
        Takes no value, eg. x
    w - width of map (widget only), in pixels
    h - height of map (widget only), in pixels
    v - limits what map controls are displayed (widget only).
        By default, a widget-style map shows none of these.
        Possible values, joined with no separator:
        l - layer switcher
        h - list of headlines next to map
        p - permalink
        eg. to turn them all on: v=lhp


    """
    params = request.GET
    schemas = set()
    place_types = set()
    lids = params.get("l", None)
    show_custom_layer = False
    if lids is not None:
        no_layers_specified = False
        try:
            pat = re.compile(r'(\w\d+)')
            for lid in pat.findall(lids):
                layer_type = lid[0]
                layer_id = int(lid[1:])
                if layer_type == 'p':
                    place_types.add(layer_id)
                elif layer_type == 't':
                    schemas.add(layer_id)
                elif layer_type == 'c':
                    show_custom_layer = True
        except:
            pass
    else:
        no_layers_specified = True

    # map center
    center = params.get("c", None)
    if center:
        try:
            center = [float(x) for x in center.split('_')][0:2]
        except:
            pass

    # map zoom level
    zoom = params.get("z", None)
    if zoom:
        try:
            zoom = float(zoom)
        except:
            pass

    # popup
    popup_info = None
    popup_center = params.get("p", None)
    popup_feature = params.get("f", None)
    if popup_center and popup_feature:
        try:
            popup_center = [float(x) for x in popup_center.split('_')][0:2]
            feature_type = popup_feature[0]
            feature_id = int(popup_feature[1:])
            if feature_type == 'p':
                openblock_type = 'place'
            elif feature_type == 't':
                openblock_type = 'newsitem'

            popup_info = {
                'id': feature_id,
                'openblock_type': openblock_type,
                'lonlat': [popup_center[0], popup_center[1]]
            }
        except:
            popup_center = None
            popup_feature = None

    # start and end date range
    default_interval = datetime.timedelta(days=7)
    duration = params.get('d')
    if duration is not None:
        try:
            duration = datetime.timedelta(days=int(duration))
        except (TypeError, ValueError):
            duration = default_interval
    else:
        duration = default_interval
    default_enddate = datetime.date.today()
    default_startdate = default_enddate - duration

    startdate = params.get('start_date')
    if startdate is not None:
        for format in ('%m/%d/%Y', '%Y-%m-%d'):
            try:
                startdate = datetime.datetime.strptime(startdate,
                                                       format).date()
                break
            except ValueError:
                pass
        if isinstance(startdate, basestring):
            startdate = None

    enddate = params.get('end_date')
    if enddate is not None:
        for format in ('%m/%d/%Y', '%Y-%m-%d'):
            try:
                enddate = datetime.datetime.strptime(enddate, format).date()
                break
            except ValueError:
                pass
        if isinstance(enddate, basestring):
            enddate = None

    # The filters argument can override startdate & enddate.
    if startdate is None and enddate is None and filters:
        date_filter = filters.get('date') or filters.get('pubdate')
        if date_filter:
            startdate = date_filter.start_date
            enddate = date_filter.end_date

    if startdate is None and enddate is None:
        enddate = datetime.date.today()
        startdate = enddate - duration
    elif startdate is None:
        startdate = enddate - duration
    elif enddate is None:
        enddate = startdate + duration

    if enddate < startdate:
        enddate = startdate + duration

    # inject date range into filters if none was specified:
    if filters and filters.get('date') is None:
        filters.add('date', startdate, enddate)

    api_startdate = startdate.strftime("%Y-%m-%d")
    api_enddate = (enddate + datetime.timedelta(days=1)).strftime("%Y-%m-%d")

    layers = []

    if (startdate != default_startdate) or (enddate != default_enddate):
        show_custom_layer = True

    # All available place layers.
    for place_type in PlaceType.objects.filter(is_mappable=True).all():
        layers.append({
            'id': 'p%d' % place_type.id,
            'title': place_type.plural_name,
            'url': reverse('place_detail_json', args=[place_type.slug]),
            'params': {'limit': 1000},
            'minZoom': 15,
            'bbox': True,
            'visible': place_type.id in place_types,  # off by default
        })

    # All available NewsItem layers.
    for schema in get_schema_manager(request).all():
        # if filters and 'schema' in filters and filters['schema'].schema == schema:
        #     visible = True
        if no_layers_specified and show_default_layers and not show_custom_layer:
            # default on if no 't' param given
            visible = True
        elif schemas and schema.id in schemas:
            visible = True
        else:
            visible = False
        layers.append({
            'id': 't%d' % schema.id,
            'title': schema.plural_name,
            'url': reverse('map_items_json'),
            'params': {
                'type': schema.slug,
                'limit': 1000,
                'startdate': api_startdate,
                'enddate': api_enddate
            },
            'bbox': False,
            'visible': visible
        })

    # Explicit filtering by ID.
    ids = params.get('i') or u''
    ids = [i.strip() for i in re.split(r'[^\d]+', ids) if i.strip()]
    if ids:
        show_custom_layer = True
        if filters is None:
            filters = FilterChain(request)
        filters.replace('id', *ids)

    # 'Custom' layer. This is a catch-all for all filtering
    # that isn't just enabling a default layer with the default
    # date range.
    # Not visible unless there is something like that to show.
    if filters and sorted(filters.keys()) not in (
            [], ['date'], ['date', 'schema'], ['schema']):
        show_custom_layer = True

    if filters is not None:
        # Don't inspect filters['schema']; that's already covered by schemas above.
        base_url = reverse('map_items_json')
        layer_url = filters.make_url(base_url=base_url)
        # Quick ugly hacks to make the itemquery api happy.
        # Hooray proliferation of spellings.
        layer_url = layer_url.replace('locations=', 'locationid=')
        layer_url = layer_url.replace('start_date=', 'startdate=')
        layer_url = layer_url.replace('end_date=', 'enddate=')

        if 'schema' in filters:
            # Normally, filters.make_url() captures the schema in the
            # path part of the URL. But map_items_json doesn't,
            # so we add a query parameter.
            params = {'type': [s.slug for s in filters['schema'].schemas]}
        else:
            params = {}
        custom_layer = {
            'url': layer_url,
            'params': params,
            'title': u"Custom Filter",
            'visible': show_custom_layer,
            'id': 'c1',
        }
        layers.append(custom_layer)

    is_widget = params.get('x', None) is not None
    controls = {}
    control_list = params.get("v", None)
    if control_list is not None:
        if 'l' in control_list:
            controls['layers'] = True
        if 'h' in control_list:
            controls['headline_list'] = True
        if 'p' in control_list:
            controls['permalink'] = True

    width = params.get("w", None)
    if width:
        try:
            width = int(width)
        except ValueError:
            width = None

    height = params.get("h", None)
    if height:
        try:
            height = int(height)
        except ValueError:
            height = None

    config = {
        'center': center or [settings.DEFAULT_MAP_CENTER_LON,
                             settings.DEFAULT_MAP_CENTER_LAT],
        'zoom': zoom or settings.DEFAULT_MAP_ZOOM,
        'layers': layers,
        'is_widget': is_widget,
        'permalink_params': {
            'start_date': startdate.strftime('%m/%d/%Y'),
            'end_date': enddate.strftime('%m/%d/%Y'),
        },
    }

    if filters is not None and 'id' in filters:
        # Put them in the params so the js code can construct, well,
        # permalinks with these ids, on the client side.
        ids = '-'.join(map(str, filters['id'].ids))
        config['permalink_params']['i'] = ids
    if popup_info:
        config['popup'] = popup_info

    if is_widget:
        config['controls'] = controls
        if width is not None:
            config['width'] = width
        if height is not None:
            config['height'] = height

    return config
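
A side note on the start_date/end_date handling above: it tries several
strptime formats in order and falls back to None when none of them match. A
minimal standalone sketch of that pattern (the helper name and default format
tuple are illustrative, not part of the original module):

import datetime

def parse_loose_date(value, formats=('%m/%d/%Y', '%Y-%m-%d')):
    """Return a datetime.date for the first format that parses, else None."""
    for fmt in formats:
        try:
            return datetime.datetime.strptime(value, fmt).date()
        except ValueError:
            pass
    return None

# parse_loose_date('01/31/2011') == parse_loose_date('2011-01-31') == datetime.date(2011, 1, 31)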
Example 53
def _decode_map_permalink(request, show_default_layers=True, filters=None):
    """
    Permalinks for the big map, with more compact query parameters.

    Returns a map_config dictionary.

    Accepted parameters:

    c - map center, separated by underscore, eg. c=-92.28283_38.95658

    z - map zoom, eg. z=12

    l - layers to display on load, comma- or dash-separated,
        eg. l=p13,t32,p1 or eg. l=p12345-t7-t9,
        where p => place layer
        and t => schema ("type") layer

    i - items to load specifically by id, comma- or dash-separated,
        eg. i=t1234-t456

    p - popup center, with underscore, eg. p=-92.3438_38.9658
    f - popup feature, eg. f=t1234 or f=p1234
        where p = a place and t = a news item

    start_date - start date (inclusive), %m/%d/%Y or %Y-%m-%d
    end_date - end date (inclusive), %m/%d/%Y or %Y-%m-%d
    d - duration in days (overridden by end date), eg. d=7

    x - show as 'widget', just the map and nothing around it.
        Takes no value, eg. x
    w - width of map (widget only), in pixels
    h - height of map (widget only), in pixels
    v - limits what map controls are displayed (widget only).
        By default, the widget-style map shows none of these.
        Possible values, joined with no separator:
        l - layer switcher
        h - list of headlines next to map
        p - permalink
        eg. to turn them all on: v=lhp
    """
    params = request.GET
    schemas = set()
    place_types = set()
    lids = params.get("l", None)
    show_custom_layer = False
    if lids is not None:
        no_layers_specified = False
        try:
            pat = re.compile(r'(\w\d+)')
            for lid in pat.findall(lids):
                layer_type = lid[0]
                layer_id = int(lid[1:])
                if layer_type == 'p':
                    place_types.add(layer_id)
                elif layer_type == 't':
                    schemas.add(layer_id)
                elif layer_type == 'c':
                    show_custom_layer = True
        except ValueError:
            pass
    else:
        no_layers_specified = True

    # map center
    center = params.get("c", None)
    if center:
        try:
            center = [float(x) for x in center.split('_')][0:2]
        except ValueError:
            pass

    # map zoom level
    zoom = params.get("z", None)
    if zoom:
        try:
            zoom = float(zoom)
        except ValueError:
            pass
        
    # popup 
    popup_info = None
    popup_center = params.get("p", None)
    popup_feature = params.get("f", None)
    if popup_center and popup_feature:
        try:
            popup_center = [float(x) for x in popup_center.split('_')][0:2]
            feature_type = popup_feature[0]
            feature_id = int(popup_feature[1:])
            if feature_type == 'p':
                openblock_type = 'place'
            elif feature_type == 't':
                openblock_type = 'newsitem'
            else:
                raise ValueError('unknown popup feature type %r' % feature_type)

            popup_info = {
                'id': feature_id,
                'openblock_type': openblock_type,
                'lonlat': [popup_center[0], popup_center[1]]
            }
        except (ValueError, IndexError):
            popup_center = None
            popup_feature = None

    # start and end date range
    default_interval = datetime.timedelta(days=7)
    duration = params.get('d')
    if duration is not None:
        try:
            duration = datetime.timedelta(days=int(duration))
        except (TypeError, ValueError):
            duration = default_interval
    else:
        duration = default_interval
    default_enddate = datetime.date.today()
    default_startdate = default_enddate - duration

    startdate = params.get('start_date')
    if startdate is not None:
        for format in ('%m/%d/%Y', '%Y-%m-%d'):
            try:
                startdate = datetime.datetime.strptime(startdate, format).date()
                break
            except ValueError:
                pass
        if isinstance(startdate, basestring):
            startdate = None

    enddate = params.get('end_date')
    if enddate is not None:
        for format in ('%m/%d/%Y', '%Y-%m-%d'):
            try:
                enddate = datetime.datetime.strptime(enddate, format).date()
                break
            except ValueError:
                pass
        if isinstance(enddate, basestring):
            enddate = None

    # The filters argument can override startdate & enddate.
    if startdate is None and enddate is None and filters:
        date_filter = filters.get('date') or filters.get('pubdate')
        if date_filter:
            startdate = date_filter.start_date
            enddate = date_filter.end_date

    if startdate is None and enddate is None:
        enddate = datetime.date.today()
        startdate = enddate - duration
    elif startdate is None:
        startdate = enddate - duration
    elif enddate is None:
        enddate = startdate + duration

    if enddate < startdate:
        enddate = startdate + duration

    # inject date range into filters if none was specified:
    if filters and filters.get('date') is None: 
        filters.add('date', startdate, enddate)

    api_startdate = startdate.strftime("%Y-%m-%d")  
    api_enddate = (enddate + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
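    # Note: api_enddate is one day past end_date. Presumably the items API
    # treats its 'enddate' parameter as exclusive, whereas the permalink
    # end_date documented above is inclusive.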

    layers = []

    if (startdate != default_startdate) or (enddate != default_enddate):
        show_custom_layer = True

    # All available place layers.
    for place_type in PlaceType.objects.filter(is_mappable=True).all():
        layers.append({
            'id': 'p%d' % place_type.id,
            'title': place_type.plural_name,
            'url': reverse('place_detail_json', args=[place_type.slug]),
            'params': {'limit': 1000},
            'minZoom': 15,
            'bbox': True,
            'visible': place_type.id in place_types # off by default
        })

    # All available NewsItem layers.
    for schema in get_schema_manager(request).all():
        # if filters and 'schema' in filters and filters['schema'].schema == schema:
        #     visible = True
        if no_layers_specified and show_default_layers and not show_custom_layer:
            # default on if no 't' param given
            visible = True
        elif schemas and schema.id in schemas:
            visible = True
        else:
            visible = False
        layers.append({
            'id': 't%d' % schema.id,
            'title':  schema.plural_name,
            'url':    reverse('map_items_json'),
            'params': {'type': schema.slug, 'limit': 1000,
                       'startdate': api_startdate,
                       'enddate': api_enddate},
            'bbox': False,
            'visible': visible
        })

    # Explicit filtering by ID.
    ids = params.get('i') or u''
    ids = [i.strip() for i in re.split(r'[^\d]+', ids)
           if i.strip()]
    if ids:
        show_custom_layer = True
        if filters is None:
            filters = FilterChain(request)
        filters.replace('id', *ids)


    # 'Custom' layer. This is a catch-all for all filtering
    # that isn't just enabling a default layer with the default
    # date range.
    # Not visible unless there is something like that to show.
    if filters and sorted(filters.keys()) not in ([],
                                                  ['date'],
                                                  ['date', 'schema'],
                                                  ['schema']):
        show_custom_layer = True

    if filters is not None:
        # Don't inspect filters['schema']; that's already covered by schemas above.
        base_url = reverse('map_items_json')
        layer_url = filters.make_url(base_url=base_url)
        # Quick ugly hacks to make the itemquery api happy.
        # Hooray proliferation of spellings.
        layer_url = layer_url.replace('locations=', 'locationid=')
        layer_url = layer_url.replace('start_date=', 'startdate=')
        layer_url = layer_url.replace('end_date=', 'enddate=')

        if 'schema' in filters:
            # Normally, filters.make_url() captures the schema in the
            # path part of the URL. But map_items_json doesn't,
            # so we add a query parameter.
            params = {'type': [s.slug for s in filters['schema'].schemas]}
        else:
            params = {}
        custom_layer = {
            'url': layer_url,
            'params': params,
            'title': u"Custom Filter",
            'visible': show_custom_layer,
            'id': 'c1',
            }
        layers.append(custom_layer)

    is_widget = params.get('x', None) is not None
    controls = {}
    control_list = params.get("v", None)
    if control_list is not None: 
        if 'l' in control_list: 
            controls['layers'] = True
        if 'h' in control_list: 
            controls['headline_list'] = True
        if 'p' in control_list: 
            controls['permalink'] = True

    width = params.get("w", None)
    if width:
        try:
            width = int(width)
        except ValueError:
            width = None

    height = params.get("h", None)
    if height:
        try:
            height = int(height)
        except ValueError:
            height = None

    config = {
        'center': center or [settings.DEFAULT_MAP_CENTER_LON,
                             settings.DEFAULT_MAP_CENTER_LAT],
        'zoom': zoom or settings.DEFAULT_MAP_ZOOM,
        'layers': layers,
        'is_widget': is_widget,
        'permalink_params': {
            'start_date': startdate.strftime('%m/%d/%Y'),
            'end_date': enddate.strftime('%m/%d/%Y'),
        },
    }

    if filters is not None and 'id' in filters:
        # Put them in the params so the js code can construct, well,
        # permalinks with these ids, on the client side.
        ids = '-'.join(map(str, filters['id'].ids))
        config['permalink_params']['i'] = ids
    if popup_info:
        config['popup'] = popup_info

    if is_widget: 
        config['controls'] = controls
        if width is not None: 
            config['width'] = width
        if height is not None: 
            config['height'] = height
    
    return config
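
For context, here is a hedged sketch of how a view might hand the returned
config to client-side code. The view name, URL wiring, and use of
django.utils.simplejson are assumptions for illustration, not taken from the
original module:

from django.http import HttpResponse
from django.utils import simplejson  # matches the Django 1.x / Python 2 era of the code above

def bigmap_config_json(request):
    # Hypothetical view: decode the permalink parameters and return the
    # resulting map config as JSON for the client-side map setup.
    config = _decode_map_permalink(request, show_default_layers=True)
    return HttpResponse(simplejson.dumps(config), content_type='application/json')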
Example 54
 def test_empty(self):
     chain = FilterChain()
     self.assertEqual(chain.items(), [])
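
A possible follow-up case, sketched on the assumption that a FilterChain built
from a (mock) request supports the same replace('id', ...) round trip that
_decode_map_permalink relies on above; the test name and ids are illustrative:

 def test_replace_ids(self):
     # Sketch: _decode_map_permalink calls filters.replace('id', *ids) and
     # later checks "'id' in filters"; this assumes the same holds for a
     # chain constructed from a mock request.
     chain = FilterChain(mock.Mock())
     chain.replace('id', '12', '34')
     self.assertTrue('id' in chain)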