Example #1
class RightManager(object):
    
    def __init__(self):
        self.right_cls = Right
        self._levels = SortedDict()
        self._categories = SortedDict()
        self._rights = SortedDict()
    
    def register_level(self, key, title, description=""):
        self._levels[key] = RightLevel(self, key, title, description)

    def register_category(self, key, title, description=""):
        self._categories[key] = RightCategory(self, key, title, description)
    
    def register(self, key, title, description="", category="", level=""):
        self._rights[key] = Right(self, key, title, description=description, category=category, level=level)
        
    def get_category(self, category_key):
        return self._categories.get(category_key, None)

    def get_level(self, level_key):
        return self._levels.get(level_key, None)
    
    def get_right(self, key):
        return self._rights.get(key)
        
    def get_rights(self):
        return [self.get_right(key) for key in self._rights.keys()]

    def get_rights_dict(self):
        return [self.get_right(key).to_dict() for key in self._rights.keys()]
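A minimal usage sketch for the manager above. The Right, RightLevel and RightCategory classes are not shown in this example, so they are assumed to accept the arguments forwarded by the register* methods; treat this as illustration only, not code from the source project.

rights = RightManager()
rights.register_level("admin", "Administrator", "Full access")
rights.register_category("billing", "Billing")
rights.register("billing.view", "View invoices", category="billing", level="admin")

right = rights.get_right("billing.view")   # Right instance, or None if unknown
ordered = rights.get_rights()              # rights in registration order (SortedDict)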
Example #2
 def js_options(self):
     options = deepcopy(self._meta.options)
     columns = self.bound_columns
     aoColumnDefs = options.setdefault('aoColumnDefs', [])
     colopts = SortedDict()
     for index, name in enumerate(columns.keys()):
         column = columns[name]
         for key, value in column.options.items():
             if not (key, str(value)) in colopts.keys():
                 colopts[(key, str(value))] = {}
                 colopts[(key, str(value))]['targets'] = []
             colopts[(key, str(value))]['targets'] = colopts[(key, str(value))]['targets'] + [index]
             colopts[(key, str(value))]['key'] = key
             colopts[(key, str(value))]['value'] = value
         if column.sort_field != column.display_field and column.sort_field in columns:
             key = 'iDataSort'
             value = columns.keys().index(column.sort_field)
             if not (key, str(value)) in colopts.keys():
                 colopts[(key, str(value))] = {}
                 colopts[(key, str(value))]['targets'] = []
             colopts[(key, str(value))]['targets'] = colopts[(key, str(value))]['targets'] + [index]
             colopts[(key, str(value))]['key'] = key
             colopts[(key, str(value))]['value'] = value
     for kv, values in colopts.items():
         aoColumnDefs.append(dict([(values['key'], values['value']), ('aTargets', values['targets'])]))
     options['sAjaxSource'] = reverse_ajax_source(options.get('sAjaxSource'))
     return mark_safe(dumpjs(options, indent=4, sort_keys=True))
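The loop above folds identical (option, value) pairs into a single aoColumnDefs entry whose aTargets lists every matching column index. A stripped-down sketch of that grouping step, using collections.OrderedDict in place of Django's SortedDict so it runs standalone:

from collections import OrderedDict  # stands in for django.utils.datastructures.SortedDict

column_options = [{'bSortable': False}, {'bSortable': False}, {'bVisible': False}]
colopts = OrderedDict()
for index, options in enumerate(column_options):
    for key, value in options.items():
        entry = colopts.setdefault((key, str(value)),
                                   {'key': key, 'value': value, 'targets': []})
        entry['targets'].append(index)

aoColumnDefs = [{e['key']: e['value'], 'aTargets': e['targets']} for e in colopts.values()]
# [{'bSortable': False, 'aTargets': [0, 1]}, {'bVisible': False, 'aTargets': [2]}]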
Example #3
    def create_content( self, json_dump ):
        json_list = list( json_dump )
        col_obj = CollectionContentType()
        if not col_obj.find_one( { "collection_name":self.collection_name} ):
            col_obj.load_json( { "collection_name":self.collection_name, "key_names":{}, "user": ''} )

        data_obj = col_obj.find_one( { "collection_name":self.collection_name} )
        user = data_obj['user']
        previous_keys = data_obj['key_names']
        if not previous_keys:
            previous_keys = SortedDict()
        pre_keys = previous_keys.keys()

        keys_dict = SortedDict()
        for each in json_list:
            for i,key in enumerate(each.keys()):
                if key == 'user':
                    user = each[key]
                elif key not in keys_dict.keys():
                    keys_dict[key] = i

        for new_key in keys_dict.keys():
            if new_key in pre_keys:
                count = int( previous_keys[new_key] ) + int( keys_dict[new_key] )
                previous_keys[new_key] = count
            else:
                previous_keys[new_key] = keys_dict[new_key]
        row_data = col_obj.find_one( { "collection_name":self.collection_name} )
        row_data['key_names'] = previous_keys
        row_data['user'] = user

        col_obj.update( { "collection_name":self.collection_name} , {"$set":{'key_names':previous_keys, 'user': user}} )
Example #4
def model_factory(model, *args, **kwargs):
    ''' Simple object fabric for tests '''
    save = kwargs.pop('save', False)
    num = kwargs.pop('num', 1)
    kwargs = SortedDict(kwargs)
    if kwargs and not isinstance(kwargs.values()[0], list):
        for key in kwargs:
            kwargs[key] = [kwargs[key]]

    def _create_model_obj(**_kwargs):
        ''' Create or build object '''
        if save:
            return model.objects.create(*args, **_kwargs)
        return model(*args, **_kwargs)

    models = DebugList(fields=kwargs.keys())
    if kwargs:
        model_kwargs = map(lambda value: dict(zip(kwargs.keys(), value)),
                       zip(*kwargs.values()))
        for model_kw in model_kwargs * num:
            models.append(_create_model_obj(**model_kw))
    elif num:
        for counter in range(num):
            models.append(_create_model_obj())
    else:
        models.append(_create_model_obj())

    if len(models) == 1:
        return models[0]
    return models
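A hypothetical usage sketch; Author stands in for any Django model with a name field, and DebugList (not shown above) is assumed to behave like a plain list.

# Illustrative only - Author and its `name` field are placeholders.
author = model_factory(Author, name='Alice')                        # one unsaved instance
authors = model_factory(Author, name=['Alice', 'Bob'], save=True)   # two rows created in the DB
blanks = model_factory(Author, num=3)                               # three unsaved blank instances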
Example #5
 def js_options(self):
     options = deepcopy(self._meta.options)
     aoColumnDefs = options.setdefault('aoColumnDefs', [])
     colopts = SortedDict()
     for index, bcol in enumerate(self.bound_columns.values()):
         for key, value in bcol.options.items():
             if not (key, str(value)) in colopts.keys():
                 colopts[(key, str(value))] = {}
                 colopts[(key, str(value))]['targets'] = []
             coltargets = colopts[(key, str(value))]['targets'] + [index]
             colopts[(key, str(value))]['targets'] = coltargets
             colopts[(key, str(value))]['key'] = key
             colopts[(key, str(value))]['value'] = value
         if bcol.sort_field not in self.bound_columns:
             continue
         if bcol.sort_field == bcol.display_field:
             continue
         key = 'iDataSort'
         value = self.bound_columns.keys().index(bcol.sort_field)
         if not (key, str(value)) in colopts.keys():
             colopts[(key, str(value))] = {}
             colopts[(key, str(value))]['targets'] = []
         coltargets = colopts[(key, str(value))]['targets'] + [index]
         colopts[(key, str(value))]['targets'] = coltargets
         colopts[(key, str(value))]['key'] = key
         colopts[(key, str(value))]['value'] = value
     for kv, values in colopts.items():
         aoColumnDefs.append(dict([(values['key'], values['value']),
                                   ('aTargets', values['targets'])]))
     return mark_safe(dumpjs(options, indent=4, sort_keys=True))
Example #6
    def expenditures(self):
        print 'working on expenditures'
        csv_name = 'expenditures.csv'
        outfile_path = os.path.join(self.data_dir,  csv_name)
        outfile = open(outfile_path, 'w')

        header_translation = SortedDict([
            ('amount', 'amount'),
            ('bakref_tid', 'bakref_tid'),
            ('cmte_id', 'cmte_id'),
            ('committee__filer__name', 'filer'),
            ('committee__filer__filer_id', 'filer_id'),
            ('committee__name', 'committee'),
            ('committee__filer_id_raw', 'committee_id'),
            ('cum_ytd', 'cum_ytd'),
            ('cycle__name', 'cycle'),
            ('entity_cd', 'entity_cd'),
            ('expn_chkno', 'expn_chkno'),
            ('expn_code', 'expn_code'),
            ('expn_date', 'expn_date'),
            ('expn_dscr', 'expn_dscr'),
            ('filing__filing_id_raw', 'filing_id'),
            ('filing__start_date', 'filing_start_date'),
            ('filing__end_date', 'filing_end_date'),
            ('form_type', 'form_type'),
            ('g_from_e_f', 'g_from_e_f'),
            ('id', 'id'),
            ('individual_id', 'individual_id'),
            ('line_item', 'line_item'),
            ('memo_code', 'memo_code'),
            ('memo_refno', 'memo_refno'),
            ('name', 'name'),
            ('org_id', 'org_id'),
            ('payee_adr1', 'payee_adr1'),
            ('payee_adr2', 'payee_adr2'),
            ('payee_city', 'payee_city'),
            ('payee_namf', 'payee_namf'),
            ('payee_naml', 'payee_naml'),
            ('payee_nams', 'payee_nams'),
            ('payee_namt', 'payee_namt'),
            ('payee_st', 'payee_st'),
            ('payee_zip4', 'payee_zip4'),
            ('tran_id', 'tran_id'),
            ('xref_match', 'xref_match'),
            ('xref_schnm', 'xref_schnm'),
        ])
        csv_writer = csvkit.unicsv.UnicodeCSVDictWriter(
            outfile, fieldnames=header_translation.keys(), delimiter='|')
        csv_writer.writerow(header_translation)
        for c in Cycle.objects.all():
            dict_rows = Expenditure.objects.filter(cycle=c).exclude(
                dupe=True).values(*header_translation.keys())
            csv_writer.writerows(dict_rows)
        outfile.close()
        print 'Exported expenditures '
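Stripped of the Django models, the pattern above is: an ordered mapping from query field names to CSV column names drives both the header row and the per-row column order. A minimal sketch with the standard library's csv module (the snippet's UnicodeCSVDictWriter is assumed to behave like a unicode-aware DictWriter):

import csv
from collections import OrderedDict  # stands in for SortedDict

header_translation = OrderedDict([
    ('committee__name', 'committee'),
    ('amount', 'amount'),
])
rows = [{'committee__name': 'Friends of X', 'amount': '100.00'}]

with open('expenditures.csv', 'w') as outfile:
    writer = csv.DictWriter(outfile, fieldnames=list(header_translation), delimiter='|')
    writer.writerow(header_translation)   # header row made of the friendly names
    writer.writerows(rows)                # data rows keyed by the raw query field names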
Example #7
    def handle(self, *app_labels, **options):

        # Activate project's default language
        translation.activate(settings.LANGUAGE_CODE)

        comment = options["comment"]
        batch_size = options["batch_size"]

        verbosity = int(options.get("verbosity", 1))
        app_list = SortedDict()
        # if no apps given, use all installed.
        if len(app_labels) == 0:
            for app in models.get_apps():
                if not app in app_list.keys():
                    app_list[app] = []
                for model_class in models.get_models(app):
                    if not model_class in app_list[app]:
                        app_list[app].append(model_class)
        else:
            for label in app_labels:
                try:
                    app_label, model_label = label.split(".")
                    try:
                        app = models.get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)

                    model_class = models.get_model(app_label, model_label)
                    if model_class is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
                    if app in app_list.keys():
                        if app_list[app] and model_class not in app_list[app]:
                            app_list[app].append(model_class)
                    else:
                        app_list[app] = [model_class]
                except ValueError:
                    # This is just an app - no model qualifier.
                    app_label = label
                    try:
                        app = models.get_app(app_label)
                        if not app in app_list.keys():
                            app_list[app] = []
                        for model_class in models.get_models(app):
                            if not model_class in app_list[app]:
                                app_list[app].append(model_class)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
        # Create revisions.
        for app, model_classes in app_list.items():
            for model_class in model_classes:
                self.create_initial_revisions(app, model_class, comment, batch_size, verbosity)

        # Go back to default language
        translation.deactivate()
Example #8
    def render(self, context, instance, placeholder):
        country = context["request"].GET.get("country", None)
        region = context["request"].GET.get("region", None)
        # province = context['request'].GET.get('province', None)

        location_list = (
            Location.objects.all().order_by("country", "region", "province").values("country", "region", "province")
        )

        country_list_all = SortedDict()
        region_list_all = SortedDict()
        province_list_all = SortedDict()

        region_country_all = SortedDict()
        province_region_all = SortedDict()

        for loc in location_list:
            country_list_all[loc["country"]] = None
            region_list_all[loc["region"]] = None
            province_list_all[loc["province"]] = None
            if country and not region:
                if loc["country"] == country:
                    region_country_all[loc["region"]] = None
            if region:
                if loc["region"] == region:
                    province_region_all[loc["province"]] = None

        get_lat_long_url = reverse("admin:get_lat_long_url")
        get_near_locations_url = reverse("admin:get_near_locations_url")
        get_list_locations_url = reverse("admin:get_list_locations_url")
        location_types = LocationType.objects.all()

        region_list = region_list_all.keys()
        region_list.sort()

        province_list = province_list_all.keys()
        province_list.sort()

        context.update(
            {
                "get_lat_long_url": get_lat_long_url,
                "get_near_locations_url": get_near_locations_url,
                "get_list_locations_url": get_list_locations_url,
                "country_list_all": country_list_all.keys(),
                "region_list_all": region_list,
                "province_list_all": province_list,
                "region_country_all": region_country_all.keys(),
                "province_region_all": province_region_all.keys(),
                "instance": instance,
                "distance_choices": DISTANCE_CHOICES,
                "location_types": location_types,
            }
        )
        return context
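Each SortedDict above is really an ordered set: writing key = None deduplicates while preserving first-seen order, and .keys() recovers the ordered values. The idiom in isolation, with collections.OrderedDict standing in for SortedDict:

from collections import OrderedDict  # stands in for SortedDict

locations = [
    {'country': 'IT', 'region': 'Lazio'},
    {'country': 'IT', 'region': 'Toscana'},
    {'country': 'FR', 'region': 'Alsace'},
]
countries = OrderedDict()
for loc in locations:
    countries[loc['country']] = None   # duplicates collapse, first-seen order kept

list(countries.keys())                 # ['IT', 'FR']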
Example #9
 def traverse(self):
     ret = SortedDict()
     level = self.roots()
     while len(level) > 0:
         nextlevel = SortedDict()
         for node in level:
             if node not in ret:
                 ret[node] = None
                 # add dependencies to the next stack
                 map(nextlevel.__setitem__, self[node], [])
         level = nextlevel.keys()
     return ret.keys()
Example #10
    def make_fields(self, **kwargs):
        if self.localized:
            langs_dict = SortedDict(django_settings.LANGUAGES)
            default_code = django_settings.LANGUAGE_CODE
            default_name = langs_dict[default_code]
            langs_dict.insert(0, default_code, default_name)
            langs = langs_dict.keys()
        else:
            langs = (django_settings.LANGUAGE_CODE,)

        fields = list()
        for lang in langs:
            kwargs['language_code'] = lang
            fields.append(self.make_field(**kwargs))

        #set initial values
        for field in fields:
            lang = field.language_code
            field.initial = self.get_editor_value(lang)

        if self.localized and len(django_settings.LANGUAGES) > 1:
            for field in fields:
                lang_name = unicode(langs_dict[field.language_code])
                field.label += mark_safe(' <span class="lang">(%s)</span>' % lang_name)

        return fields
Example #11
def cleanup_email_addresses(request, addresses):
    """
    Takes a list of EmailAddress instances and cleans it up, making
    sure only valid ones remain, without multiple primaries etc.

    Order is important: e.g. if multiple primary e-mail addresses
    exist, the first one encountered will be kept as primary.
    """
    from .models import EmailAddress
    adapter = get_adapter()
    # Let's group by `email`
    e2a = SortedDict()  # maps email to EmailAddress
    primary_addresses = []
    verified_addresses = []
    primary_verified_addresses = []
    for address in addresses:
        # Pick up only valid ones...
        email = valid_email_or_none(address.email)
        if not email:
            continue
        # ... and non-conflicting ones...
        if (app_settings.UNIQUE_EMAIL
                and EmailAddress.objects
                .filter(email__iexact=email)
                .exists()):
            continue
        a = e2a.get(email.lower())
        if a:
            a.primary = a.primary or address.primary
            a.verified = a.verified or address.verified
        else:
            a = address
            a.verified = a.verified or adapter.is_email_verified(request,
                                                                 a.email)
            e2a[email.lower()] = a
        if a.primary:
            primary_addresses.append(a)
            if a.verified:
                primary_verified_addresses.append(a)
        if a.verified:
            verified_addresses.append(a)
    # Now that we got things sorted out, let's assign a primary
    if primary_verified_addresses:
        primary_address = primary_verified_addresses[0]
    elif verified_addresses:
        # Pick any verified as primary
        primary_address = verified_addresses[0]
    elif primary_addresses:
        # Okay, let's pick primary then, even if unverified
        primary_address = primary_addresses[0]
    elif e2a:
        # Pick the first
        primary_address = e2a.keys()[0]
    else:
        # Empty
        primary_address = None
    # There can only be one primary
    for a in e2a.values():
        a.primary = primary_address.email.lower() == a.email.lower()
    return list(e2a.values()), primary_address
Example #12
class BindingDict(object):
    """
    This dict-like object is used to store fields on a serializer.

    This ensures that whenever fields are added to the serializer we call
    `field.bind()` so that the `field_name` and `parent` attributes
    can be set correctly.
    """
    def __init__(self, serializer):
        self.serializer = serializer
        self.fields = SortedDict()

    def __setitem__(self, key, field):
        self.fields[key] = field
        field.bind(field_name=key, parent=self.serializer)

    def __getitem__(self, key):
        return self.fields[key]

    def __delitem__(self, key):
        del self.fields[key]

    def items(self):
        return self.fields.items()

    def keys(self):
        return self.fields.keys()

    def values(self):
        return self.fields.values()
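A minimal sketch of how the container is exercised. The two classes below are stand-ins invented for illustration (not REST framework classes), just enough to show that bind() receives the key and the owning serializer; SortedDict is assumed importable as in the snippet.

class DummyField(object):
    def bind(self, field_name, parent):
        self.field_name = field_name
        self.parent = parent

class DummySerializer(object):
    def __init__(self):
        self.fields = BindingDict(self)

serializer = DummySerializer()
serializer.fields['title'] = DummyField()
assert serializer.fields['title'].field_name == 'title'
assert serializer.fields['title'].parent is serializer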
Example #13
    def structure(self):
        result = SortedDict()
        year = datetime.datetime.now().year
        for n in range(year, year+2):
            result[n] = []

        datapoints_map = {}
        datapoints = DataPoint.objects.filter(determinant=self.instance).order_by('month')
        for datapoint in datapoints:
            datapoints_map[datapoint.month] = datapoint

        for year in result.keys():
            for month in range(1, 13):
                dt_month = datetime.date(year, month, 1)
                di = dict(date=dt_month, target=None, actual=None)

                for value_type in ('target', 'actual'):
                    v = self.data.get('month_%d_%d_%s' % (month, year, value_type), None)
                    if v:
                        try:
                            di[value_type] = int(v)
                        except ValueError:
                            pass

                    if (di[value_type] is None):
                        if datapoints_map.has_key(dt_month):
                            di[value_type] = getattr(datapoints_map[dt_month], value_type)
                        else:
                            di[value_type] = 0

                result[year].append(di)
                
        return result
Example #14
class Node(object):
    """
    A base class representing a navigation or page node and providing some 
    dictionary-like behavior for navigating the tree.
    """

    def __init__(self):
        super(Node, self).__init__()
        self.children = SortedDict()

    def __getitem__(self, key):
        return self.children.__getitem__(key)

    def __iter__(self):
        return self.children.__iter__()

    def __setitem__(self, key, value):
        return self.children.__setitem__(key, value)

    def __unicode__(self):
        return self.title

    def keys(self):
        return self.children.keys()

    def values(self):
        return self.children.values()

    @property
    def is_leaf(self):
        return not bool(self.children)
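A quick usage sketch; the title attribute is set by hand here because Node itself never assigns it, it only assumes one exists for __unicode__.

root = Node()
root.title = u'Home'
about = Node()
about.title = u'About'
root['about'] = about

list(root)              # ['about'] - iteration follows insertion order
root['about'].is_leaf   # True
root.is_leaf            # False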
Example #15
def log_unidadesaude_by_form(request, healthUnit,stDate=365, endDate=0):
	if not request.user.is_authenticated():
		return HttpResponseRedirect(settings.SITE_ROOT + 'admin/')
	now= datetime.now()
	try:
		us = UnidadeSaude.objects.get(pk=int(healthUnit))
	except:
		return HttpResponseNotFound('Id de unidade errado')
	truncate_date = connection.ops.date_trunc_sql('month', 'forms_ficha.data_insercao')
	fichas_report = Ficha.objects.\
                                   filter(
                                            unidadesaude=us,
                                            data_insercao__gte=now -timedelta(days=stDate),
                                            data_insercao__lte=now -timedelta(days=endDate)
                                        ).\
                            extra(select={'month': truncate_date}).\
                            values('formulario__nome', 'month').\
                            annotate(numero_fichas=Count('pk')).\
                            order_by('-month')
	fichas_report = [ dict([
         ('formulario__nome', l['formulario__nome']),
         ('month', datetime.strptime(l['month'].split(' ')[0], '%Y-%m-%d').strftime('%Y%m')),
         ('numero_fichas', l['numero_fichas'])
     ]) for l in fichas_report]
	columns = [dt.strftime('%Y%m') for dt in getMonthList(now, stDate, endDate)]
	forms = Formulario.objects.all()
	rows = [r.nome for r in forms]
	table_data = SortedDict().fromkeys(rows)
	for k in table_data.keys():
		table_data[k] = SortedDict().fromkeys(columns, 0)
	for f in fichas_report:
		table_data[f['formulario__nome']][f['month']] = f['numero_fichas']
	return json_response(table_data)
Example #16
class OrderedSet(object):
    """
    A set which keeps the ordering of the inserted items.
    Currently backs onto SortedDict.
    """

    def __init__(self, iterable=None):
        self.dict = SortedDict(((x, None) for x in iterable) if iterable else [])

    def add(self, item):
        self.dict[item] = None

    def remove(self, item):
        del self.dict[item]

    def discard(self, item):
        try:
            self.remove(item)
        except KeyError:
            pass

    def __iter__(self):
        return iter(self.dict.keys())

    def __contains__(self, item):
        return item in self.dict

    def __nonzero__(self):
        return bool(self.dict)
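Usage mirrors a normal set, except that iteration order matches insertion order:

s = OrderedSet(['b', 'a', 'b'])
s.add('c')
s.discard('missing')    # silently ignored
list(s)                 # ['b', 'a', 'c']
'a' in s                # True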
Example #17
class ChoiceField(Field):
    default_error_messages = {
        'invalid_choice': _('`{input}` is not a valid choice.')
    }

    def __init__(self, choices, **kwargs):
        # Allow either single or paired choices style:
        # choices = [1, 2, 3]
        # choices = [(1, 'First'), (2, 'Second'), (3, 'Third')]
        pairs = [
            isinstance(item, (list, tuple)) and len(item) == 2
            for item in choices
        ]
        if all(pairs):
            self.choices = SortedDict([(key, display_value) for key, display_value in choices])
        else:
            self.choices = SortedDict([(item, item) for item in choices])

        # Map the string representation of choices to the underlying value.
        # Allows us to deal with eg. integer choices while supporting either
        # integer or string input, but still get the correct datatype out.
        self.choice_strings_to_values = dict([
            (six.text_type(key), key) for key in self.choices.keys()
        ])

        super(ChoiceField, self).__init__(**kwargs)

    def to_internal_value(self, data):
        try:
            return self.choice_strings_to_values[six.text_type(data)]
        except KeyError:
            self.fail('invalid_choice', input=data)

    def to_representation(self, value):
        return self.choice_strings_to_values[six.text_type(value)]
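The interesting step is the choices normalization: either a flat list or a list of (value, display) pairs collapses into one ordered mapping, and a parallel string-keyed mapping lets string input round-trip to the original type. That step in isolation, with OrderedDict standing in for SortedDict:

from collections import OrderedDict  # stands in for SortedDict

def normalize_choices(choices):
    pairs = [isinstance(item, (list, tuple)) and len(item) == 2 for item in choices]
    if all(pairs):
        mapping = OrderedDict(choices)
    else:
        mapping = OrderedDict((item, item) for item in choices)
    return mapping, dict((str(key), key) for key in mapping)

mapping, by_string = normalize_choices([1, 2, 3])
by_string['2']   # -> 2: string input comes back as the original integer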
Example #18
    def init_with_context(self, context):
        """
        Please refer to the :meth:`~admin_tools.menu.items.MenuItem.init_with_context`
        documentation from :class:`~admin_tools.menu.items.MenuItem` class.
        """
        items = self._visible_models(context['request'])
        apps = {}
        for model, perms in items:
            if not perms['change']:
                continue

            app_label = model._meta.app_label
            if app_label not in apps:
                apps[app_label] = {
                    'title': django_apps.get_app_config(app_label).verbose_name,
                    'url': self._get_admin_app_list_url(model, context),
                    'models_dict': {}
                }

            apps[app_label]['models_dict'][model._meta.object_name] = {
                'title': model._meta.verbose_name_plural,
                'url': self._get_admin_change_url(model, context)
            }

        app_order_dict = SortedDict(settings.ADMIN_TOOLS_APP_ORDER)
        added_app_list = []
        added_model_list = []

        for app_label in app_order_dict.keys():
            if app_label in apps:
                item = MenuItem(title = apps[app_label]['title'], url = apps[app_label]['url'])
                added_app_list.append(app_label)

                for model_name in app_order_dict[app_label]:
                    if model_name in apps[app_label]['models_dict']:
                        model_dict = apps[app_label]['models_dict'][model_name]
                        model_path = '%s.%s' % (app_label, model_name)
                        added_model_list.append(model_path)
                        item.children.append(MenuItem(**model_dict))

                for model_name in sorted(apps[app_label]['models_dict'].keys()):
                    model_dict = apps[app_label]['models_dict'][model_name]
                    model_path = '%s.%s' % (app_label, model_name)
                    if not model_path in added_model_list:
                        item.children.append(MenuItem(**model_dict))

                self.children.append(item)

        for app in sorted(apps.keys()):
            if app not in added_app_list:
                app_dict = apps[app]
                item = MenuItem(title = app_dict['title'], url = app_dict['url'])

                for model_name in sorted(apps[app]['models_dict'].keys()):
                    model_dict = apps[app]['models_dict'][model_name]
                    model_path = '%s.%s' % (app, model_name)
                    if not model_path in added_model_list:
                        item.children.append(MenuItem(**model_dict))

                self.children.append(item)
Example #19
def flatpage_tree(root="wiki"):
    # Create an ordered tree of all flatpages
    def _add_page(_page):
        indent = len(_page.url.split("/")[2:-1])
        return '<p>%s-&nbsp;&nbsp;<a href="%s" title="%s">%s</a></p>' % (
            "&nbsp;&nbsp;&nbsp;" * indent,
            _page.url,
            _page.title,
            _page.title,
        )

    pages = FlatPage.objects.all().order_by("url")
    from django.utils.datastructures import SortedDict

    tree = SortedDict()
    for page in pages:
        segs = page.url.split("/")[2:-1]
        # removes /wiki/ and empty last string
        if len(segs) > 0:
            tree[page.url] = [page, {}]
    menu = '<p><a href="/">Home</a></p>'
    for p in tree.keys():
        menu += _add_page(tree[p][0])
        # menu += '<li><a href="%s" title="%s">%s</a></li>' % (tree[p][0].url, tree[p][0].title, tree[p][0].url)
    # menu += '</ul>'
    return menu
Example #20
def admin_reorder(context, token):
    """
    Called in admin/base_site.html template override and applies custom ordering
    of apps/models defined by settings.ADMIN_REORDER
    """
    # sort key function - use index of item in order if exists, otherwise item
    sort = lambda order, item: (order.index(item), "") if item in order else (
        len(order), item)
    if "app_list" in context:
        # sort the app list
        order = SortedDict(settings.ADMIN_REORDER)
        context["app_list"].sort(key=lambda app: sort(order.keys(),
            app["app_url"].strip("/").split("/")[-1]))
        for i, app in enumerate(context["app_list"]):
            # sort the model list for each app
            app_name = app["app_url"].strip("/").split("/")[-1]
            if not app_name:
                app_name = app["name"].lower()
            model_order = [m.lower() for m in order.get(app_name, [])]
            context["app_list"][i]["models"].sort(key=lambda model:
                sort(model_order, model["admin_url"].strip("/").split("/")[-1]))

            # Trim trailing models that are not covered by the configured order.
            while len(context["app_list"][i]["models"]) > len(model_order):
                pos = len(context["app_list"][i]["models"]) - 1
                del context["app_list"][i]["models"][pos]

    return ""
Example #21
def get_and_child(parents, cqs):
    """ Return the values of a SortedDict keyed by the parents (reduced list),
    with children reachable through the `enfants` attribute; the children are
    supplied as a queryset via the `cqs` parameter. """
    ret = SortedDict()

    for p in parents:
        p.enfants = []
        p.file = False
        ret[p.id] = p

    parent_ids = ret.keys()
    cqs = cqs.filter(parent__in=parent_ids)
    alarms = TicketAlarm.opened.filter(ticket__in=parent_ids)
    files = TicketFile.objects.filter(ticket__in=parent_ids)

    for e in cqs:
        ret[e.parent.id].enfants.append(e)

    for a in alarms:
        ret[a.ticket_id].alarm = a

    for f in files:
        ret[f.ticket.id].file = True

    return ret.values()
Example #22
    def test_register_order(self):
        self.pool.register(TestToolbar)
        self.pool.register(CMSToolbar)

        test_toolbar = SortedDict()
        test_toolbar['cms.tests.toolbar_pool.TestToolbar'] = TestToolbar
        test_toolbar['cms.toolbar_base.CMSToolbar'] = CMSToolbar
        self.assertEqual(list(test_toolbar.keys()), list(self.pool.toolbars.keys()))
Example #23
def HomeView(request):
    # Default to zooming in on the UW Seattle campus if no default location is set
    if hasattr(settings, 'SS_DEFAULT_LOCATION'):
        loc = settings.SS_LOCATIONS[settings.SS_DEFAULT_LOCATION]
        center_latitude = loc['CENTER_LATITUDE']
        center_longitude = loc['CENTER_LONGITUDE']
        zoom_level = loc['ZOOM_LEVEL']
    else:
        center_latitude = '47.655003'
        center_longitude = '-122.306864'
        zoom_level = '15'

    search_args = {
        'center_latitude': center_latitude,
        'center_longitude': center_longitude,
        'open_now': '1',
        'distance': '500',
    }

    for key in request.GET:
        search_args[key] = request.GET[key]

    consumer = oauth2.Consumer(key=settings.SS_WEB_OAUTH_KEY, secret=settings.SS_WEB_OAUTH_SECRET)
    client = oauth2.Client(consumer)

    locations = settings.SS_LOCATIONS
    buildings = json.loads(get_building_json(client))

    # This could probably be a template tag, but didn't seem worth it for one-time use
    buildingdict = SortedDict()
    for building in buildings:
        if not building[0] in buildingdict.keys():  # building[0] is the first letter of the string
            buildingdict[building[0]] = []

        buildingdict[building[0]].append(building)

    # See if django-compressor is being used to precompile less
    if settings.COMPRESS_ENABLED:
        less_not_compiled = False
    else:
        less_not_compiled = True

    # See if there is a Google Analytics web property id
    try:
        ga_tracking_id = settings.GA_TRACKING_ID
    except:
        ga_tracking_id = None

    return render_to_response('app.html', {
        'center_latitude': center_latitude,
        'center_longitude': center_longitude,
        'zoom_level': zoom_level,
        'locations': locations,
        'buildingdict': buildingdict,
        'is_mobile': request.MOBILE,
        'less_not_compiled': less_not_compiled,
        'ga_tracking_id': ga_tracking_id,
    }, context_instance=RequestContext(request))
Example #24
    def handle(self, *app_labels, **options):
        comment = options["comment"]
        app_list = SortedDict()
        # if no apps given, use all installed.
        if len(app_labels) == 0:
            for app in models.get_apps ():
                if not app in app_list.keys():
                    app_list[app] = []
                for model_class in models.get_models(app):
                    if not model_class in app_list[app]:
                        app_list[app].append(model_class)
        else:
            for label in app_labels:
                try:
                    app_label, model_label = label.split(".")
                    try:
                        app = models.get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)

                    model_class = models.get_model(app_label, model_label)
                    if model_class is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
                    if app in app_list.keys():
                        if app_list[app] and model_class not in app_list[app]:
                            app_list[app].append(model_class)
                    else:
                        app_list[app] = [model_class]
                except ValueError:
                    # This is just an app - no model qualifier.
                    app_label = label
                    try:
                        app = models.get_app(app_label)
                        if not app in app_list.keys():
                            app_list[app] = []
                        for model_class in models.get_models(app):
                            if not model_class in app_list[app]:
                                app_list[app].append(model_class)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
        # Create revisions.
        for app, model_classes in app_list.items():
            for model_class in model_classes:
                self.create_initial_revisions(app, model_class, comment)
Example #25
    def summary(self):
        print 'working on summary'
        csv_name = 'summary.csv'
        outfile_path = os.path.join(self.data_dir,  csv_name)
        outfile = open(outfile_path, 'w')

        header_translation = SortedDict([
            ('committee__filer__name', 'filer'),
            ('committee__filer__filer_id', 'filer_id'),
            ('committee__name', 'committee'),
            ('committee__filer_id_raw', 'committee_id'),
            ('cycle__name', 'cycle'),
            ('ending_cash_balance', 'ending_cash_balance'),
            ('filing__filing_id_raw', 'filing_id'),
            ('filing__start_date', 'filing_start_date'),
            ('filing__end_date', 'filing_end_date'),
            ('form_type', 'form_type'),
            ('id', 'id'),
            ('itemized_expenditures', 'itemized_expenditures'),
            (
                'itemized_monetary_contributions',
                'itemized_monetary_contributions'
            ),
            ('non_monetary_contributions', 'non_monetary_contributions'),
            ('outstanding_debts', 'outstanding_debts'),
            ('total_contributions', 'total_contributions'),
            ('total_expenditures', 'total_expenditures'),
            ('total_monetary_contributions', 'total_monetary_contributions'),
            ('unitemized_expenditures', 'unitemized_expenditures'),
            (
                'unitemized_monetary_contributions',
                'unitemized_monetary_contributions'
            ),
        ])
        csv_writer = csvkit.unicsv.UnicodeCSVDictWriter(
            outfile, fieldnames=header_translation.keys(), delimiter='|')
        csv_writer.writerow(header_translation)
        for c in Cycle.objects.all():
            dict_rows = Summary.objects.filter(cycle=c).exclude(
                dupe=True).values(*header_translation.keys())
            csv_writer.writerows(dict_rows)
        outfile.close()
        print 'Exported summary'
Example #26
def get_events_for_year(year, event_list):
    """Returns a dictionary of events for the given year.
    
    Events are ordered by month, week, day and hour.
    """
    year_events = SortedDict([(date(int(year), i+1, 1), []) for i in range(12)])

    for month in year_events.keys():
        year_events[month] = get_events_for_month(year, month.month, event_list)

    return year_events
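A sketch of the skeleton this helper builds before filling it: twelve first-of-month keys in calendar order (get_events_for_month is assumed to exist as in the snippet; OrderedDict stands in for SortedDict):

from datetime import date
from collections import OrderedDict  # stands in for SortedDict

year = 2014
year_events = OrderedDict((date(year, i + 1, 1), []) for i in range(12))
list(year_events)[:3]   # first keys: Jan 1, Feb 1, Mar 1 of 2014
# Each empty list is then replaced by get_events_for_month(year, month.month, event_list).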
Example #27
    def init_with_context(self, context):
        if self._initialized:
            return

        items = self._visible_models(context['request'])
        apps = {}

        app_order_dict = SortedDict(settings.ADMIN_TOOLS_APP_ORDER)
        added_app_list = []
        added_model_list = []

        for model, perms in items:
            app_label = model._meta.app_label
            if app_label not in apps:
                apps[app_label] = {
                    'title': django_apps.get_app_config(app_label).verbose_name,
                    'url': self._get_admin_app_list_url(model, context),
                    'models_dict': {},
                    'models': [],
                }

            model_dict = {}
            model_dict['title'] = model._meta.verbose_name_plural
            if perms['change']:
                model_dict['change_url'] = self._get_admin_change_url(model, context)
            if perms['add']:
                model_dict['add_url'] = self._get_admin_add_url(model, context)
            apps[app_label]['models_dict'][model._meta.object_name] = model_dict

        for app_label in app_order_dict.keys():
            if app_label in apps:
                for model_name in app_order_dict[app_label]:
                    if model_name in apps[app_label]['models_dict']:
                        model_dict = apps[app_label]['models_dict'][model_name]
                        model_path = '%s.%s' % (app_label, model_name)
                        added_model_list.append(model_path)
                        apps[app_label]['models'].append(model_dict)

                for model_name in sorted(apps[app_label]['models_dict'].keys()):
                    model_dict = apps[app_label]['models_dict'][model_name]
                    model_path = '%s.%s' % (app_label, model_name)
                    if not model_path in added_model_list:
                        apps[app_label]['models'].append(model_dict)

                added_app_list.append(app_label)
                self.children.append(apps[app_label])

        for app_label in sorted(apps.keys()):
            if not app_label in added_app_list:
                apps[app_label]['models'] = apps[app_label]['models_dict'].values()
                apps[app_label]['models'].sort(key = lambda x: x['title'])
                self.children.append(apps[app_label])

        self._initialized = True
Example #28
class ToolbarPool(object):
    def __init__(self):
        self.toolbars = SortedDict()
        self._discovered = False
        self.force_register = False

    def discover_toolbars(self):
        if self._discovered:
            return
            #import all the modules
        toolbars = get_cms_setting('TOOLBARS')
        if toolbars:
            for cls in iterload_objects(toolbars):
                self.force_register = True
                self.register(cls)
                self.force_register = False
        else:
            load('cms_toolbar')
        self._discovered = True

    def clear(self):
        self.toolbars = SortedDict()
        self._discovered = False

    def register(self, toolbar):
        if not self.force_register and get_cms_setting('TOOLBARS'):
            return toolbar
        from cms.toolbar_base import CMSToolbar
        # validate the app
        if not issubclass(toolbar, CMSToolbar):
            raise ImproperlyConfigured('CMS Toolbar must inherit '
                                       'cms.toolbar_base.CMSToolbar, %r does not' % toolbar)
        name = "%s.%s" % (toolbar.__module__, toolbar.__name__)
        if name in self.toolbars.keys():
            raise ToolbarAlreadyRegistered("[%s] a toolbar with this name is already registered" % name)
        self.toolbars[name] = toolbar
        return toolbar

    def unregister(self, toolbar):
        name = '%s.%s' % (toolbar.__module__, toolbar.__name__)
        if name not in self.toolbars:
            raise ToolbarNotRegistered('The toolbar %s is not registered' % name)
        del self.toolbars[name]

    def get_toolbars(self):
        self.discover_toolbars()
        return self.toolbars

    def get_watch_models(self):
        models = []
        for toolbar in self.toolbars.values():
            if hasattr(toolbar, 'watch_models'):
                models += toolbar.watch_models
        return models
Example #29
class BoundFieldset(object):
    is_fieldset = True

    def __init__(self, form, fieldset, name):
        self.form = form
        self.name = name
        self.fieldset = fieldset
        self.rows = SortedDict()
        for row in fieldset:
            self.rows[unicode(row)] = row

    def __getitem__(self, key):
        """
        >>> fieldset[1]
        # returns the item at index-1 in the fieldset
        >>> fieldset['name']
        # returns the item in the fieldset under the key 'name'
        """
        if isinstance(key, int) and not key in self.rows:
            return self[self.rows.keyOrder[key]]
        value = self.rows[key]
        if isinstance(value, basestring):
            return self.form[value]
        else:
            return type(self)(self.form, value, key)

    def __str__(self):
        env = {
            'fieldset': self,
            'form': self.form,
            'fieldset_template_name': 'partials/fieldset_as_div.html',
        }
        # TODO: don't hardcode the default template name.
        return render_to_string(self.template_name or 'partials/fieldset_as_div.html', env)

    def __iter__(self):
        for name in self.rows.keys():
            yield self[name]

    @property
    def template_name(self):
        return self.fieldset.template_name

    @property
    def errors(self):
        return self.form.errors.get(self.name, self.form.error_class())

    @property
    def css_classes(self):
        css_classes = set((self.fieldset.FIELDSET_CSS_CLASS, self.name))
        css_classes.update(self.fieldset.css_classes or [])
        if self.errors:
            css_classes.add(self.fieldset.error_css_class)
        return ' '.join(css_classes)
Example #30
File: base.py Project: emulbreh/ecs
 def docs(self):
     d = SortedDict()
     for name in self.get_field_names():
         prefix, key = self.split_prefix(name)
         info = self.get_field_docs(name)
         if prefix:
             d.setdefault(prefix, {})
             d[prefix][key] = info
         else:
             d[name] = info
     d.keyOrder = list(sorted(d.keys()))
     return d
Example #31
 def make_security_hash(cls, kwargs):
     kwargs = SortedDict(kwargs)
     kwargs.keyOrder.sort()
     args = list(itertools.chain(kwargs.keys(), kwargs.values()))
     return make_hash(settings.SECRET_KEY, step=2, *args)
Example #32
class CachedFilesMixin(object):
    patterns = (("*.css", (
        r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""",
        r"""(@import\s*["']\s*(.*?)["'])""",
    )), )

    def __init__(self, *args, **kwargs):
        super(CachedFilesMixin, self).__init__(*args, **kwargs)
        try:
            self.cache = get_cache('staticfiles')
        except InvalidCacheBackendError:
            # Use the default backend
            self.cache = default_cache
        self._patterns = SortedDict()
        for extension, patterns in self.patterns:
            for pattern in patterns:
                compiled = re.compile(pattern)
                self._patterns.setdefault(extension, []).append(compiled)

    def hashed_name(self, name, content=None):
        parsed_name = urlsplit(unquote(name))
        clean_name = parsed_name.path
        if content is None:
            if not self.exists(clean_name):
                raise ValueError("The file '%s' could not be found with %r." %
                                 (clean_name, self))
            try:
                content = self.open(clean_name)
            except IOError:
                # Handle directory paths
                return name
        path, filename = os.path.split(clean_name)
        root, ext = os.path.splitext(filename)
        # Get the MD5 hash of the file
        md5 = hashlib.md5()
        for chunk in content.chunks():
            md5.update(chunk)
        md5sum = md5.hexdigest()[:12]
        hashed_name = os.path.join(path, u"%s.%s%s" % (root, md5sum, ext))
        unparsed_name = list(parsed_name)
        unparsed_name[2] = hashed_name
        return urlunsplit(unparsed_name)

    def cache_key(self, name):
        return u'staticfiles:cache:%s' % name

    def url(self, name, force=False):
        """
        Returns the real URL in DEBUG mode.
        """
        if settings.DEBUG and not force:
            hashed_name = name
        else:
            cache_key = self.cache_key(name)
            hashed_name = self.cache.get(cache_key)
            if hashed_name is None:
                hashed_name = self.hashed_name(name)
                # set the cache if there was a miss (e.g. if cache server goes down)
                self.cache.set(cache_key, hashed_name)
        return unquote(super(CachedFilesMixin, self).url(hashed_name))

    def url_converter(self, name):
        """
        Returns the custom URL converter for the given file name.
        """
        def converter(matchobj):
            """
            Converts the matched URL depending on the parent level (`..`)
            and returns the normalized and hashed URL using the url method
            of the storage.
            """
            matched, url = matchobj.groups()
            # Completely ignore http(s) prefixed URLs
            if url.startswith(('http', 'https')):
                return matched
            name_parts = name.split('/')
            # Using posix normpath here to remove duplicates
            url = posixpath.normpath(url)
            url_parts = url.split('/')
            parent_level, sub_level = url.count('..'), url.count('/')
            if url.startswith('/'):
                sub_level -= 1
                url_parts = url_parts[1:]
            if parent_level or not url.startswith('/'):
                start, end = parent_level + 1, parent_level
            else:
                if sub_level:
                    if sub_level == 1:
                        parent_level -= 1
                    start, end = parent_level, sub_level - 1
                else:
                    start, end = 1, sub_level - 1
            joined_result = '/'.join(name_parts[:-start] + url_parts[end:])
            hashed_url = self.url(unquote(joined_result), force=True)
            # Return the hashed and normalized version to the file
            return 'url("%s")' % unquote(hashed_url)

        return converter

    def post_process(self, paths, dry_run=False, **options):
        """
        Post process the given list of files (called from collectstatic).
        """
        processed_files = []
        # don't even dare to process the files if we're in dry run mode
        if dry_run:
            return processed_files

        # delete cache of all handled paths
        self.cache.delete_many([self.cache_key(path) for path in paths])

        # only try processing the files we have patterns for
        matches = lambda path: matches_patterns(path, self._patterns.keys())
        processing_paths = [path for path in paths if matches(path)]

        # then sort the files by the directory level
        path_level = lambda name: len(name.split(os.sep))
        for name in sorted(paths, key=path_level, reverse=True):

            # first get a hashed name for the given file
            hashed_name = self.hashed_name(name)

            with self.open(name) as original_file:
                # then get the original's file content
                content = original_file.read()

                # to apply each replacement pattern on the content
                if name in processing_paths:
                    converter = self.url_converter(name)
                    for patterns in self._patterns.values():
                        for pattern in patterns:
                            content = pattern.sub(converter, content)

                # then save the processed result
                if self.exists(hashed_name):
                    self.delete(hashed_name)

                saved_name = self._save(hashed_name, ContentFile(content))
                hashed_name = force_unicode(saved_name.replace('\\', '/'))
                processed_files.append(hashed_name)

                # and then set the cache accordingly
                self.cache.set(self.cache_key(name), hashed_name)

        return processed_files
Example #33
def get_filters(getter_func):
    filters = SortedDict()
    for slug in FilterGroup.objects.values_list('slug', flat=True):
        for filters_slug in getter_func(slug, []):
            filters[filters_slug] = None
    return filters.keys()
Example #34
    def handle(self, *app_labels, **options):
        from django.core.cache import cache
        from django.db.models import get_app, get_apps, get_models, get_model
        from django.utils.datastructures import SortedDict
        from johnny.cache import invalidate
        from johnny.middleware import QueryCacheMiddleware

        log = logging.getLogger('clear_johnny_cache')

        # enable queryset cache
        q = QueryCacheMiddleware()

        all_models = options.get('all_models')
        exclude = options.get('exclude')

        excluded_apps = set(get_app(app_label) for app_label in exclude)
        app_list = None

        if all_models:
            if len(app_labels):
                # FIXME: warn user that specifying apps on the command line when
                # using -all-models options has no impact
                pass
            app_list = SortedDict(
                (app, None) for app in get_apps() if app not in excluded_apps)
        elif not len(app_labels) == 0:
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" %
                                           app_label)

                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" %
                                           (app_label, model_label))

                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" %
                                           app_label)
                    app_list[app] = None

        if app_list:
            # Generate a list of models to be invalidated, and call the Johnny
            # Cache invalidate command.
            full_model_list = []
            for app, model_list in app_list.items():
                if model_list is None:
                    model_list = get_models(app)
                if model_list:
                    full_model_list.extend(model_list)

            log.info(
                'Trying to clear cache for %d app(s), %d model(s) to invalidate'
                % (len(app_list), len(full_model_list)))

            for model in full_model_list:
                log.info('Invalidating cache for %s' %
                         (model._meta.module_name))
                invalidate(model)
            log.info('Done invalidating')
        else:
            log.info('No model to invalidate')
Example #35
    def handle(self, *app_labels, **options):
        from django.db.models import get_app, get_apps, get_models, get_model

        format = options.get('format', 'json')
        indent = options.get('indent', None)
        using = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[using]
        exclude = options.get('exclude', [])
        show_traceback = options.get('traceback', False)
        use_natural_keys = options.get('use_natural_keys', False)

        excluded_apps = set(get_app(app_label) for app_label in exclude)

        if len(app_labels) == 0:
            app_list = SortedDict(
                (app, None) for app in get_apps() if app not in excluded_apps)
        else:
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" %
                                           app_label)

                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" %
                                           (app_label, model_label))

                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" %
                                           app_label)
                    app_list[app] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Now collate the objects to be serialized.
        objects = []
        for model in sort_dependencies(app_list.items()):
            if not model._meta.proxy:
                objects.extend(model._default_manager.using(using).all())

        try:
            return serializers.serialize(format,
                                         objects,
                                         indent=indent,
                                         use_natural_keys=use_natural_keys)
        except Exception, e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
Example #36
    def handle(self, *app_labels, **options):
        from django.db.models import get_app, get_apps, get_model

        format = options.get('format')
        indent = options.get('indent')
        using = options.get('database')
        excludes = options.get('exclude')
        show_traceback = options.get('traceback')
        use_natural_keys = options.get('use_natural_keys')
        use_base_manager = options.get('use_base_manager')

        excluded_apps = set()
        excluded_models = set()
        for exclude in excludes:
            if '.' in exclude:
                app_label, model_name = exclude.split('.', 1)
                model_obj = get_model(app_label, model_name)
                if not model_obj:
                    raise CommandError('Unknown model in excludes: %s' % exclude)
                excluded_models.add(model_obj)
            else:
                try:
                    app_obj = get_app(exclude)
                    excluded_apps.add(app_obj)
                except ImproperlyConfigured:
                    raise CommandError('Unknown app in excludes: %s' % exclude)

        if len(app_labels) == 0:
            app_list = SortedDict((app, None) for app in get_apps() if app not in excluded_apps)
        else:
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))

                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    app_list[app] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Now collate the objects to be serialized.
        objects = []
        for model in sort_dependencies(app_list.items()):
            if model in excluded_models:
                continue
            if not model._meta.proxy and router.allow_syncdb(using, model):
                if use_base_manager:
                    objects.extend(model._base_manager.using(using).all())
                else:
                    objects.extend(model._default_manager.using(using).all())

        try:
            return serializers.serialize(format, objects, indent=indent,
                        use_natural_keys=use_natural_keys)
        except Exception, e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
Ejemplo n.º 37
0
    def compress(self, log=None, **options):
        """
        Searches templates containing 'compress' nodes and compresses them
        "offline" -- outside of the request/response cycle.

        The result is cached with a cache-key derived from the content of the
        compress nodes (not the content of the possibly linked files!).
        """
        extensions = options.get('extensions')
        extensions = self.handle_extensions(extensions or ['html'])
        verbosity = int(options.get("verbosity", 0))
        if not log:
            log = StringIO()
        if not settings.TEMPLATE_LOADERS:
            raise OfflineGenerationError("No template loaders defined. You "
                                         "must set TEMPLATE_LOADERS in your "
                                         "settings.")
        paths = set()
        for loader in self.get_loaders():
            try:
                module = import_module(loader.__module__)
                get_template_sources = getattr(module, 'get_template_sources',
                                               None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(list(get_template_sources('')))
            except (ImportError, AttributeError):
                # Yeah, this didn't work out so well, let's move on
                pass
        if not paths:
            raise OfflineGenerationError("No template paths found. None of "
                                         "the configured template loaders "
                                         "provided template paths. See "
                                         "http://django.me/template-loaders "
                                         "for more information on template "
                                         "loaders.")
        if verbosity > 1:
            log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
        templates = set()
        for path in paths:
            for root, dirs, files in walk(path,
                                          followlinks=options.get(
                                              'followlinks', False)):
                templates.update(
                    os.path.join(root, name) for name in files
                    if not name.startswith('.') and any(
                        fnmatch(name, "*%s" % glob) for glob in extensions))
        if not templates:
            raise OfflineGenerationError("No templates found. Make sure your "
                                         "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                         "settings are correct.")
        if verbosity > 1:
            log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

        compressor_nodes = SortedDict()
        for template_name in templates:
            try:
                template_file = open(template_name)
                try:
                    template = Template(template_file.read().decode(
                        settings.FILE_CHARSET))
                finally:
                    template_file.close()
            except IOError:  # unreadable file -> ignore
                if verbosity > 0:
                    log.write("Unreadable template at: %s\n" % template_name)
                continue
            except TemplateSyntaxError:  # broken template -> ignore
                if verbosity > 0:
                    log.write("Invalid template at: %s\n" % template_name)
                continue
            except UnicodeDecodeError:
                if verbosity > 0:
                    log.write("UnicodeDecodeError while trying to read "
                              "template %s\n" % template_name)
                continue  # 'template' was never parsed for this file
            nodes = list(self.walk_nodes(template))
            if nodes:
                compressor_nodes.setdefault(template_name, []).extend(nodes)

        if not compressor_nodes:
            raise OfflineGenerationError(
                "No 'compress' template tags found in templates.")

        if verbosity > 0:
            log.write("Found 'compress' tags in:\n\t" +
                      "\n\t".join(compressor_nodes.keys()) + "\n")

        log.write("Compressing... ")
        count = 0
        results = []
        context = Context(settings.COMPRESS_OFFLINE_CONTEXT)
        for nodes in compressor_nodes.values():
            for node in nodes:
                key = get_offline_cachekey(node.nodelist)
                try:
                    result = node.render(context, forced=True)
                except Exception, e:
                    raise CommandError("An error occured during rending: "
                                       "%s" % e)
                cache.set(key, result, settings.COMPRESS_OFFLINE_TIMEOUT)
                results.append(result)
                count += 1
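
Before any compression happens, the command above has to discover candidate templates: walk every loader path and keep the files whose names match the configured extensions. A small self-contained sketch of just that filter (paths and extensions are illustrative; the real code derives them from TEMPLATE_LOADERS and --extension):

import os
from fnmatch import fnmatch

def find_templates(paths, extensions=('.html',), followlinks=False):
    """Collect template files under the given paths, skipping dotfiles."""
    templates = set()
    for path in paths:
        for root, dirs, files in os.walk(path, followlinks=followlinks):
            templates.update(
                os.path.join(root, name) for name in files
                if not name.startswith('.') and any(
                    fnmatch(name, '*%s' % ext) for ext in extensions))
    return templates

# e.g. find_templates(['templates'], extensions=('.html', '.txt'))
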
Ejemplo n.º 38
0
def HomeView(request, template=None):
    # The preference order is cookie, config, then some static values
    # That fallback order will also apply if the cookie campus isn't in
    # settings.
    location = None

    if hasattr(settings, "SS_LOCATIONS"):
        m = re.match(r'^/([a-z]+)', request.path)
        if m and m.group(1) in settings.SS_LOCATIONS:
            location = m.group(1)

    if location is None:
        cookies = request.COOKIES
        if "default_location" in cookies:
            cookie_value = cookies["default_location"]
            # The format of the cookie is this, urlencoded:
            # lat,long,campus,zoom
            location = urllib.unquote(cookie_value).split(',')[2]

            if not hasattr(settings, "SS_LOCATIONS"):
                location = None

            elif location not in settings.SS_LOCATIONS:
                location = None

    if location is None:
        if hasattr(settings, 'SS_DEFAULT_LOCATION'):
            location = settings.SS_DEFAULT_LOCATION

    spaces, template_values = get_campus_data(location)

    spaces = json.dumps(spaces)

    # Default to zooming in on the UW Seattle campus if no default location is set
    if hasattr(settings, 'SS_DEFAULT_LOCATION'):
        default_location = settings.SS_DEFAULT_LOCATION
        locations = settings.SS_LOCATIONS

    if (hasattr(settings, 'SS_BUILDING_CLUSTERING_ZOOM_LEVELS')
            and hasattr(settings, 'SS_DISTANCE_CLUSTERING_RATIO')):
        by_building_zooms = settings.SS_BUILDING_CLUSTERING_ZOOM_LEVELS
        by_distance_ratio = settings.SS_DISTANCE_CLUSTERING_RATIO
    else:
        raise ImproperlyConfigured(
            "You need to configure your clustering constants in settings.py or local_settings.py"
        )

    consumer = oauth2.Consumer(key=settings.SS_WEB_OAUTH_KEY,
                               secret=settings.SS_WEB_OAUTH_SECRET)
    client = oauth2.Client(consumer)
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }

    if request.user and request.user.is_authenticated():
        headers["XOAUTH_USER"] = "******" % request.user.username

    log_shared_space_reference(request, headers, client)

    buildings = json.loads(get_building_json(client))

    favorites_json = get_favorites_json(headers, client)

    # This could probably be a template tag, but didn't seem worth it for one-time use
    #TODO: hey, actually it's probably going to be a Handlebars helper and template
    buildingdict = SortedDict()
    for building in buildings:
        try:
            if building[0] not in buildingdict:  # building[0] is the first letter of the string
                buildingdict[building[0]] = []

            buildingdict[building[0]].append(building)
        except:
            pass

    params = {
        'username': (request.user.username
                     if request.user and request.user.is_authenticated()
                     else ''),
        'center_latitude': template_values['center_latitude'],
        'center_longitude': template_values['center_longitude'],
        'zoom_level': template_values['zoom_level'],
        'locations': locations,
        'default_location': default_location,
        'by_building_zooms': by_building_zooms,
        'by_distance_ratio': by_distance_ratio,
        'buildingdict': buildingdict,
        'spaces': spaces,
        'favorites_json': favorites_json,
    }

    response = render_to_response(template,
                                  params,
                                  context_instance=RequestContext(request))
    response['Cache-Control'] = 'no-cache'
    return response
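
The buildingdict loop above simply buckets building names by their first character while preserving the order in which letters are first seen. The same grouping, sketched with collections.OrderedDict and plain strings for the buildings:

from collections import OrderedDict

def group_by_first_letter(buildings):
    """Bucket names by their first character, preserving first-seen order."""
    grouped = OrderedDict()
    for building in buildings:
        if not building:  # skip empty names instead of a blanket except
            continue
        grouped.setdefault(building[0], []).append(building)
    return grouped

grouped = group_by_first_letter(['Allen Library', 'Art Building', 'Bagley Hall'])
# 'A' holds Allen Library and Art Building; 'B' holds Bagley Hall
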
Ejemplo n.º 39
0
    def compress(self, log=None, **options):
        """
        Searches templates containing 'compress' nodes and compresses them
        "offline" -- outside of the request/response cycle.

        The result is cached with a cache-key derived from the content of the
        compress nodes (not the content of the possibly linked files!).
        """
        extensions = options.get('extensions')
        extensions = self.handle_extensions(extensions or ['html'])
        verbosity = int(options.get("verbosity", 0))
        if not log:
            log = StringIO()
        if not settings.TEMPLATE_LOADERS:
            raise OfflineGenerationError("No template loaders defined. You "
                                         "must set TEMPLATE_LOADERS in your "
                                         "settings.")
        paths = set()
        for loader in self.get_loaders():
            try:
                module = import_module(loader.__module__)
                get_template_sources = getattr(module, 'get_template_sources',
                                               None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(list(get_template_sources('')))
            except (ImportError, AttributeError, TypeError):
                # Yeah, this didn't work out so well, let's move on
                pass
        if not paths:
            raise OfflineGenerationError("No template paths found. None of "
                                         "the configured template loaders "
                                         "provided template paths. See "
                                         "http://django.me/template-loaders "
                                         "for more information on template "
                                         "loaders.")
        if verbosity > 1:
            log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
        templates = set()
        for path in paths:
            for root, dirs, files in os.walk(path,
                                             followlinks=options.get(
                                                 'followlinks', False)):
                templates.update(
                    os.path.join(root, name) for name in files
                    if not name.startswith('.') and any(
                        fnmatch(name, "*%s" % glob) for glob in extensions))
        if not templates:
            raise OfflineGenerationError("No templates found. Make sure your "
                                         "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                         "settings are correct.")
        if verbosity > 1:
            log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

        engine = options.get("engine", "django")
        parser = self.__get_parser(engine)

        compressor_nodes = SortedDict()
        for template_name in templates:
            try:
                template = parser.parse(template_name)
            except IOError:  # unreadable file -> ignore
                if verbosity > 0:
                    log.write("Unreadable template at: %s\n" % template_name)
                continue
            except TemplateSyntaxError as e:  # broken template -> ignore
                if verbosity > 0:
                    log.write("Invalid template %s: %s\n" % (template_name, e))
                continue
            except TemplateDoesNotExist:  # non existent template -> ignore
                if verbosity > 0:
                    log.write("Non-existent template at: %s\n" % template_name)
                continue
            except UnicodeDecodeError:
                if verbosity > 0:
                    log.write("UnicodeDecodeError while trying to read "
                              "template %s\n" % template_name)
                continue  # 'template' was never parsed for this file
            try:
                nodes = list(parser.walk_nodes(template))
            except (TemplateDoesNotExist, TemplateSyntaxError) as e:
                # Could be an error in some base template
                if verbosity > 0:
                    log.write("Error parsing template %s: %s\n" %
                              (template_name, e))
                continue
            if nodes:
                template.template_name = template_name
                compressor_nodes.setdefault(template, []).extend(nodes)

        if not compressor_nodes:
            raise OfflineGenerationError(
                "No 'compress' template tags found in templates."
                "Try running compress command with --follow-links and/or"
                "--extension=EXTENSIONS")

        if verbosity > 0:
            log.write("Found 'compress' tags in:\n\t" +
                      "\n\t".join((t.template_name
                                   for t in compressor_nodes.keys())) + "\n")

        log.write("Compressing... ")
        count = 0
        results = []
        offline_manifest = SortedDict()
        init_context = parser.get_init_context(
            settings.COMPRESS_OFFLINE_CONTEXT)

        for template, nodes in compressor_nodes.items():
            context = Context(init_context)
            template._log = log
            template._log_verbosity = verbosity

            if not parser.process_template(template, context):
                continue

            for node in nodes:
                context.push()
                parser.process_node(template, context, node)
                rendered = parser.render_nodelist(template, context, node)
                key = get_offline_hexdigest(rendered)

                if key in offline_manifest:
                    continue

                try:
                    result = parser.render_node(template, context, node)
                except Exception as e:
                    raise CommandError("An error occured during rendering %s: "
                                       "%s" % (template.template_name, e))
                offline_manifest[key] = result
                context.pop()
                results.append(result)
                count += 1

        write_offline_manifest(offline_manifest)

        log.write("done\nCompressed %d block(s) from %d template(s).\n" %
                  (count, len(compressor_nodes)))
        return count, results
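
The distinctive piece in this version is the offline manifest: every rendered 'compress' block is stored under a digest of its rendered content, so identical blocks are processed only once. A reduced sketch of that de-duplication, with hashlib standing in for get_offline_hexdigest:

import hashlib
from collections import OrderedDict

def build_manifest(rendered_blocks):
    """Map a content digest to its rendered result, skipping duplicates."""
    manifest = OrderedDict()
    for rendered in rendered_blocks:
        key = hashlib.md5(rendered.encode('utf-8')).hexdigest()
        if key in manifest:
            continue  # an identical block has already been handled
        manifest[key] = rendered
    return manifest

manifest = build_manifest(['<script src="a.js"></script>',
                           '<script src="a.js"></script>',
                           '<link href="b.css"/>'])
# only two entries: the duplicated script block is stored once
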
Ejemplo n.º 40
0
    def handle_noargs(self, *app_labels, **options):
        from django.db.models import get_app, get_apps, get_model

        format = options.get('format')
        indent = options.get('indent')
        using = options.get('database')
        excludes = options.get('exclude')
        show_traceback = options.get('traceback')
        use_natural_keys = options.get('use_natural_keys')
        use_base_manager = options.get('use_base_manager')
        pks = options.get('primary_keys')
        user = options.get('user')
        userid = user.id

        stdout = OutputWrapper(options.get('stdout', sys.stdout))

        if pks:
            primary_keys = pks.split(',')
        else:
            primary_keys = []

        excluded_apps = set()
        excluded_models = set()
        if excludes:
            for exclude in excludes:
                if '.' in exclude:
                    app_label, model_name = exclude.split('.', 1)
                    model_obj = get_model(app_label, model_name)
                    if not model_obj:
                        raise CommandError('Unknown model in excludes: %s' %
                                           exclude)
                    excluded_models.add(model_obj)
                else:
                    try:
                        app_obj = get_app(exclude)
                        excluded_apps.add(app_obj)
                    except ImproperlyConfigured:
                        raise CommandError('Unknown app in excludes: %s' %
                                           exclude)

        if len(app_labels) == 0:
            if primary_keys:
                raise CommandError(
                    "You can only use --pks option with one model")
            app_list = SortedDict(
                (app, None) for app in get_apps() if app not in excluded_apps)
        else:
            if len(app_labels) > 1 and primary_keys:
                raise CommandError(
                    "You can only use --pks option with one model")
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" %
                                           app_label)
                    if app in excluded_apps:
                        continue
                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" %
                                           (app_label, model_label))

                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    if primary_keys:
                        raise CommandError(
                            "You can only use --pks option with one model")
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" %
                                           app_label)
                    if app in excluded_apps:
                        continue
                    app_list[app] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            try:
                serializers.get_serializer(format)
            except serializers.SerializerDoesNotExist:
                pass

            raise CommandError("Unknown serialization format: %s" % format)

        def get_objects():
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if model in excluded_models:
                    continue
                if not model._meta.proxy and router.allow_syncdb(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = model._default_manager

                    queryset = objects.using(using).order_by(
                        model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    queryset = queryset.filter(owner_id=userid)
                    for obj in queryset.iterator():
                        yield obj

        try:
            stdout.ending = None
            #            self.stdout.ending = None
            serializers.serialize(format,
                                  get_objects(),
                                  indent=indent,
                                  use_natural_keys=use_natural_keys,
                                  stream=stdout)

#            serializers.serialize(format, get_objects(), indent=indent,
#                    use_natural_keys=use_natural_keys, stream=self.stdout)
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
Ejemplo n.º 41
0
class SortedDictTests(IgnorePendingDeprecationWarningsMixin, SimpleTestCase):
    def setUp(self):
        super(SortedDictTests, self).setUp()
        self.d1 = SortedDict()
        self.d1[7] = 'seven'
        self.d1[1] = 'one'
        self.d1[9] = 'nine'

        self.d2 = SortedDict()
        self.d2[1] = 'one'
        self.d2[9] = 'nine'
        self.d2[0] = 'nil'
        self.d2[7] = 'seven'

    def test_basic_methods(self):
        self.assertEqual(list(six.iterkeys(self.d1)), [7, 1, 9])
        self.assertEqual(list(six.itervalues(self.d1)),
                         ['seven', 'one', 'nine'])
        self.assertEqual(list(six.iteritems(self.d1)),
                         [(7, 'seven'), (1, 'one'), (9, 'nine')])

    def test_overwrite_ordering(self):
        """ Overwriting an item keeps its place. """
        self.d1[1] = 'ONE'
        self.assertEqual(list(six.itervalues(self.d1)),
                         ['seven', 'ONE', 'nine'])

    def test_append_items(self):
        """ New items go to the end. """
        self.d1[0] = 'nil'
        self.assertEqual(list(six.iterkeys(self.d1)), [7, 1, 9, 0])

    def test_delete_and_insert(self):
        """
        Deleting an item, then inserting the same key again will place it
        at the end.
        """
        del self.d2[7]
        self.assertEqual(list(six.iterkeys(self.d2)), [1, 9, 0])
        self.d2[7] = 'lucky number 7'
        self.assertEqual(list(six.iterkeys(self.d2)), [1, 9, 0, 7])

    if six.PY2:

        def test_change_keys(self):
            """
            Changing the keys won't do anything, it's only a copy of the
            keys dict.

            This test doesn't make sense under Python 3 because keys is
            an iterator.
            """
            k = self.d2.keys()
            k.remove(9)
            self.assertEqual(self.d2.keys(), [1, 9, 0, 7])

    def test_init_keys(self):
        """
        Initialising a SortedDict with two keys will just take the first one.

        A real dict will actually take the second value so we will too, but
        we'll keep the ordering from the first key found.
        """
        tuples = ((2, 'two'), (1, 'one'), (2, 'second-two'))
        d = SortedDict(tuples)

        self.assertEqual(list(six.iterkeys(d)), [2, 1])

        real_dict = dict(tuples)
        self.assertEqual(sorted(six.itervalues(real_dict)),
                         ['one', 'second-two'])

        # Here the order of SortedDict values *is* what we are testing
        self.assertEqual(list(six.itervalues(d)), ['second-two', 'one'])

    def test_overwrite(self):
        self.d1[1] = 'not one'
        self.assertEqual(self.d1[1], 'not one')
        self.assertEqual(list(six.iterkeys(self.d1)),
                         list(six.iterkeys(self.d1.copy())))

    def test_append(self):
        self.d1[13] = 'thirteen'
        self.assertEqual(repr(self.d1),
                         "{7: 'seven', 1: 'one', 9: 'nine', 13: 'thirteen'}")

    def test_pop(self):
        self.assertEqual(self.d1.pop(1, 'missing'), 'one')
        self.assertEqual(self.d1.pop(1, 'missing'), 'missing')

        # We don't know which item will be popped in popitem(), so we'll
        # just check that the number of keys has decreased.
        l = len(self.d1)
        self.d1.popitem()
        self.assertEqual(l - len(self.d1), 1)

    def test_dict_equality(self):
        d = SortedDict((i, i) for i in range(3))
        self.assertEqual(d, {0: 0, 1: 1, 2: 2})

    def test_tuple_init(self):
        d = SortedDict(((1, "one"), (0, "zero"), (2, "two")))
        self.assertEqual(repr(d), "{1: 'one', 0: 'zero', 2: 'two'}")

    def test_pickle(self):
        self.assertEqual(pickle.loads(pickle.dumps(self.d1, 2)), {
            7: 'seven',
            1: 'one',
            9: 'nine'
        })

    def test_copy(self):
        orig = SortedDict(((1, "one"), (0, "zero"), (2, "two")))
        copied = copy.copy(orig)
        self.assertEqual(list(six.iterkeys(orig)), [1, 0, 2])
        self.assertEqual(list(six.iterkeys(copied)), [1, 0, 2])

    def test_clear(self):
        self.d1.clear()
        self.assertEqual(self.d1, {})
        self.assertEqual(self.d1.keyOrder, [])

    def test_reversed(self):
        self.assertEqual(list(self.d1), [7, 1, 9])
        self.assertEqual(list(self.d2), [1, 9, 0, 7])
        self.assertEqual(list(reversed(self.d1)), [9, 1, 7])
        self.assertEqual(list(reversed(self.d2)), [7, 0, 9, 1])
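
The behaviours these tests pin down (overwriting a key keeps its slot, new keys append, delete-then-reinsert moves a key to the end) match what collections.OrderedDict provides, which is why later Django versions dropped SortedDict in its favour. A quick illustration:

from collections import OrderedDict

d = OrderedDict([(7, 'seven'), (1, 'one'), (9, 'nine')])
d[1] = 'ONE'             # overwriting keeps the key in place
assert list(d) == [7, 1, 9]

d[0] = 'nil'             # new keys are appended
assert list(d) == [7, 1, 9, 0]

del d[7]
d[7] = 'lucky number 7'  # delete + reinsert moves the key to the end
assert list(d) == [1, 9, 0, 7]
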
Ejemplo n.º 42
0
class RestFieldset(object):
    @classmethod
    def create_from_string(cls, fields_string):
        fields = []

        for field in split_fields(fields_string):
            if is_match(r'^[^\(\)]+\([^\(\)]+\)$', field):
                field_name, subfields_string = field[:-1].split('(')
                subfieldset = RFS.create_from_string(subfields_string)
            else:
                field_name = field
                subfieldset = None

            fields.append(RestField(field_name, subfieldset))

        return RestFieldset(*fields)

    @classmethod
    def create_from_list(cls, fields_list):
        if isinstance(fields_list, RestFieldset):
            fields_list = fields_list.fields

        fields = []
        for field in fields_list:
            if isinstance(field, (list, tuple)):
                field_name, subfield_list = field

                fields.append(
                    RestField(field_name, cls.create_from_list(subfield_list)))
            else:
                fields.append(field)

        return RestFieldset(*fields)

    @classmethod
    def create_from_flat_list(cls, fields_list):
        return RestFieldset(*map(RestField.create_from_string, fields_list))

    def __init__(self, *fields):
        self.fields_map = SortedDict()
        for field in fields:
            if not isinstance(field, RestField):
                field = RestField(field)
            self.append(field)

    @property
    def fields(self):
        return self.fields_map.values()

    def join(self, rest_fieldset):
        assert isinstance(rest_fieldset, RestFieldset)

        a_rfs = deepcopy(self)
        b_rfs = deepcopy(rest_fieldset)

        for rf in b_rfs.fields:
            if rf.name not in a_rfs.fields_map:
                a_rfs.fields_map[rf.name] = rf
            else:
                a_rfs.fields_map[rf.name] = a_rfs.fields_map[rf.name].join(rf)

        return a_rfs

    def intersection(self, rest_fieldset):
        assert isinstance(rest_fieldset, RestFieldset)

        a_rfs = deepcopy(self)
        b_rfs = deepcopy(rest_fieldset)

        values = []
        for rf in b_rfs.fields:
            if rf.name in a_rfs.fields_map:
                values.append(a_rfs.fields_map[rf.name].intersection(rf))

        return self.__class__(*values)

    def extend_fields_fieldsets(self, rest_fieldset):
        assert isinstance(rest_fieldset, RestFieldset)

        a_rfs = deepcopy(self)
        b_rfs = deepcopy(rest_fieldset)

        for rf in b_rfs.fields:
            if (rf.subfieldset and rf.name in a_rfs.fields_map
                    and not a_rfs.fields_map[rf.name].subfieldset):
                a_rfs.fields_map[rf.name].subfieldset = rf.subfieldset

        return a_rfs

    def flat_intersection(self, rest_fieldset):
        assert isinstance(rest_fieldset, RestFieldset)

        a_rfs = deepcopy(self)
        b_rfs = deepcopy(rest_fieldset)

        values = []
        for rf in b_rfs.fields:
            if rf.name in a_rfs.fields_map:
                values.append(a_rfs.fields_map[rf.name])

        return self.__class__(*values)

    def __deepcopy__(self, memo):
        return self.__class__(*map(deepcopy, self.fields))

    def __str__(self):
        return ','.join(map(force_text, self.fields))

    def __sub__(self, rest_fieldset):
        if isinstance(rest_fieldset, (list, tuple, set)):
            rest_fieldset = RFS(*rest_fieldset)

        assert isinstance(rest_fieldset, RestFieldset)

        a_rfs = deepcopy(self)
        b_rfs = deepcopy(rest_fieldset)

        values = []
        for rf in a_rfs.fields:
            if rf.name not in b_rfs.fields_map:
                values.append(rf)

        return self.__class__(*values)

    def __bool__(self):
        return bool(self.fields_map)

    __nonzero__ = __bool__

    def get(self, key):
        return self.fields_map.get(key)

    def append(self, field):
        if isinstance(field, RestField):
            rest_field = field
        else:
            rest_field = RestField(field)

        if rest_field.name in self.fields_map:
            rest_field = rest_field.join(self.fields_map[rest_field.name])

        self.fields_map[rest_field.name] = rest_field

    def flat(self):
        return set(self.fields_map.keys())
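
The join() above merges two ordered field maps: right-hand fields are appended when missing and combined with the existing entry when present. Stripped of the RestField machinery, the pattern looks roughly like this (subfields are modelled as plain sets, purely for illustration):

from collections import OrderedDict

def join_fieldsets(a, b):
    """Merge two ordered field maps; subfield sets are unioned on collision."""
    merged = OrderedDict(a)
    for name, subfields in b.items():
        if name in merged:
            merged[name] = merged[name] | subfields
        else:
            merged[name] = subfields
    return merged

a = OrderedDict([('user', {'id'}), ('issue', set())])
b = OrderedDict([('issue', {'title'}), ('created_at', set())])
joined = join_fieldsets(a, b)
# keeps a's order, appends 'created_at', and unions issue's subfields
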
Ejemplo n.º 43
0
    def compress(self, log=None, **options):
        """
        Searches templates containing 'compress' nodes and compresses them
        "offline" -- outside of the request/response cycle.

        The result is cached with a cache-key derived from the content of the
        compress nodes (not the content of the possibly linked files!).
        """
        extensions = options.get('extensions')
        extensions = self.handle_extensions(extensions or ['html'])
        verbosity = int(options.get("verbosity", 0))
        if not log:
            log = StringIO()
        if not settings.TEMPLATE_LOADERS:
            raise OfflineGenerationError("No template loaders defined. You "
                                         "must set TEMPLATE_LOADERS in your "
                                         "settings.")
        paths = set()
        for loader in self.get_loaders():
            try:
                module = import_module(loader.__module__)
                get_template_sources = getattr(module, 'get_template_sources',
                                               None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(list(get_template_sources('')))
            except (ImportError, AttributeError):
                # Yeah, this didn't work out so well, let's move on
                pass
        if not paths:
            raise OfflineGenerationError("No template paths found. None of "
                                         "the configured template loaders "
                                         "provided template paths. See "
                                         "http://django.me/template-loaders "
                                         "for more information on template "
                                         "loaders.")
        if verbosity > 1:
            log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
        templates = set()
        for path in paths:
            for root, dirs, files in walk(path,
                                          followlinks=options.get(
                                              'followlinks', False)):
                templates.update(
                    os.path.join(root, name) for name in files
                    if not name.startswith('.') and any(
                        fnmatch(name, "*%s" % glob) for glob in extensions))
        if not templates:
            raise OfflineGenerationError("No templates found. Make sure your "
                                         "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                         "settings are correct.")
        if verbosity > 1:
            log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

        compressor_nodes = SortedDict()
        for template_name in templates:
            try:
                template_file = open(template_name)
                try:
                    template = Template(template_file.read().decode(
                        settings.FILE_CHARSET))
                finally:
                    template_file.close()
            except IOError:  # unreadable file -> ignore
                if verbosity > 0:
                    log.write("Unreadable template at: %s\n" % template_name)
                continue
            except TemplateSyntaxError:  # broken template -> ignore
                if verbosity > 0:
                    log.write("Invalid template at: %s\n" % template_name)
                continue
            except UnicodeDecodeError:
                if verbosity > 0:
                    log.write("UnicodeDecodeError while trying to read "
                              "template %s\n" % template_name)
                continue  # 'template' was never parsed for this file
            nodes = list(self.walk_nodes(template))
            if nodes:
                template.template_name = template_name
                compressor_nodes.setdefault(template, []).extend(nodes)

        if not compressor_nodes:
            raise OfflineGenerationError(
                "No 'compress' template tags found in templates.")

        if verbosity > 0:
            log.write("Found 'compress' tags in:\n\t" +
                      "\n\t".join((t.template_name
                                   for t in compressor_nodes.keys())) + "\n")

        log.write("Compressing... ")
        count = 0
        results = []
        offline_manifest = {}
        for template, nodes in compressor_nodes.iteritems():
            context = Context(settings.COMPRESS_OFFLINE_CONTEXT)
            extra_context = {}
            firstnode = template.nodelist[0]
            if isinstance(firstnode, ExtendsNode):
                # If this template has a ExtendsNode, we apply our patch to
                # generate the necessary context, and then use it for all the
                # nodes in it, just in case (we don't know which nodes were
                # in a block)
                firstnode._old_get_parent = firstnode.get_parent
                firstnode.get_parent = MethodType(patched_get_parent,
                                                  firstnode)
                extra_context = firstnode.render(context)
                context.render_context = extra_context.render_context
            for node in nodes:
                context.push()
                if extra_context and node._block_name:
                    context['block'] = context.render_context[
                        BLOCK_CONTEXT_KEY].pop(node._block_name)
                    if context['block']:
                        context['block'].context = context
                key = get_offline_hexdigest(node.nodelist)
                try:
                    result = node.render(context, forced=True)
                except Exception, e:
                    raise CommandError("An error occured during rendering: "
                                       "%s" % e)
                offline_manifest[key] = result
                context.pop()
                results.append(result)
                count += 1
Ejemplo n.º 44
0
    def test_copy(self):
        orig = SortedDict(((1, "one"), (0, "zero"), (2, "two")))
        copied = copy.copy(orig)
        self.assertEqual(orig.keys(), [1, 0, 2])
        self.assertEqual(copied.keys(), [1, 0, 2])
Ejemplo n.º 45
0
    def handle(self, *app_labels, **options):
        from django.db.models import get_app, get_apps, get_model

        output_folder = options.get('output_folder')
        print "Output folder:", output_folder
        print "NOTE: See --output-folder option"
        max_records_per_chunk = options.get('max_records_per_chunk')
        format = options.get('format')
        indent = options.get('indent')
        using = options.get('database')
        excludes = options.get('exclude')
        show_traceback = options.get('traceback')
        use_natural_keys = options.get('use_natural_keys')
        use_base_manager = options.get('use_base_manager')

        excluded_apps = set()
        excluded_models = set()
        for exclude in excludes:
            if '.' in exclude:
                app_label, model_name = exclude.split('.', 1)
                model_obj = get_model(app_label, model_name)
                if not model_obj:
                    raise CommandError('Unknown model in excludes: %s' % exclude)
                excluded_models.add(model_obj)
            else:
                try:
                    app_obj = get_app(exclude)
                    excluded_apps.add(app_obj)
                except ImproperlyConfigured:
                    raise CommandError('Unknown app in excludes: %s' % exclude)

        if len(app_labels) == 0:
            app_list = SortedDict((app, None) for app in get_apps() if app not in excluded_apps)
        else:
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))

                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    app_list[app] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Now collate the objects to be serialized.
        objects = []
        model_count = 1000
        chunk_count = 1000
        for model in sort_dependencies(app_list.items()):
            model_count += 1
            if model in excluded_models:
                continue
            if not model._meta.proxy and router.allow_migrate(using, model):
                if use_base_manager:
                    objects.extend(model._base_manager.using(using).all())
                else:
                    items_total = model._default_manager.using(using).count()
                    chunks_total = (items_total / max_records_per_chunk) +1
                    for chunk_num in range(0, chunks_total):
                        output_objects = model._default_manager.using(using).all().order_by('id')[chunk_num*max_records_per_chunk:(chunk_num+1)*max_records_per_chunk]
                        if output_objects:
                            chunk_count += 1
                            dump_file_name = output_folder + "/%d_%d.json" % (model_count, chunk_count)
                            print "Dumping file: %s [%d]" % (dump_file_name, chunks_total)
                            output = serializers.serialize(format, output_objects, indent=indent,
                                        use_natural_keys=use_natural_keys)
                            with open(dump_file_name, "w") as dumpfile:
                                dumpfile.write(output)
        return ''
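
The chunking above slices each queryset into windows of max_records_per_chunk and writes one numbered file per non-empty window. The same slicing logic, applied to a plain list so it can be run standalone:

def iter_chunks(items, max_records_per_chunk):
    """Yield consecutive slices of at most max_records_per_chunk items."""
    chunks_total = (len(items) // max_records_per_chunk) + 1
    for chunk_num in range(chunks_total):
        chunk = items[chunk_num * max_records_per_chunk:
                      (chunk_num + 1) * max_records_per_chunk]
        if chunk:  # the final window is empty when len(items) divides evenly
            yield chunk

list(iter_chunks(list(range(7)), 3))  # [[0, 1, 2], [3, 4, 5], [6]]
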
Ejemplo n.º 46
0
class Columns(object):
    """Container for spawning BoundColumns.

    This is bound to a table and provides its ``columns`` property. It
    provides access to those columns in different ways (iterator,
    item-based, filtered and unfiltered, etc.), stuff that would not be
    possible with a simple iterator in the table class.

    Note that when you define your column using a name override, e.g.
    ``author_name = tables.Column(name="author")``, then the column will
    be exposed by this container as "author", not "author_name".
    """
    def __init__(self, table):
        self.table = table
        self._columns = SortedDict()

    def _reset(self):
        """Used by parent table class."""
        self._columns = SortedDict()

    def _spawn_columns(self):
        # (re)build the "_columns" cache of BoundColumn objects (note that
        # ``base_columns`` might have changed since last time); creating
        # BoundColumn instances can be costly, so we reuse existing ones.
        new_columns = SortedDict()
        for decl_name, column in self.table.base_columns.items():
            # take into account name overrides
            exposed_name = column.name or decl_name
            if exposed_name in self._columns:
                new_columns[exposed_name] = self._columns[exposed_name]
            else:
                new_columns[exposed_name] = BoundColumn(self.table, column, decl_name)
        self._columns = new_columns

    def all(self):
        """Iterate through all columns, regardless of visiblity (as
        opposed to ``__iter__``.

        This is used internally a lot.
        """
        self._spawn_columns()
        for column in self._columns.values():
            yield column

    def items(self):
        self._spawn_columns()
        for r in self._columns.items():
            yield r

    def names(self):
        self._spawn_columns()
        for r in self._columns.keys():
            yield r

    def index(self, name):
        self._spawn_columns()
        return self._columns.keyOrder.index(name)

    def sortable(self):
        """Iterate through all sortable columns.

        This is primarily useful in templates, where iterating over the full
        set and checking {% if column.sortable %} can be problematic in
        conjunction with e.g. {{ forloop.last }} (the last column might not
        be the actual last that is rendered).
        """
        for column in self.all():
            if column.sortable:
                yield column

    def __iter__(self):
        """Iterate through all *visible* bound columns.

        This is primarily geared towards table rendering.
        """
        for column in self.all():
            if column.visible:
                yield column

    def __contains__(self, item):
        """Check by both column object and column name."""
        self._spawn_columns()
        if isinstance(item, basestring):
            return item in self.names()
        else:
            return item in self.all()

    def __len__(self):
        self._spawn_columns()
        return len([1 for c in self._columns.values() if c.visible])

    def __getitem__(self, name):
        """Return a column by name."""
        self._spawn_columns()
        return self._columns[name]
Ejemplo n.º 47
0
def make_missing_data_form(instance, required_fields=[]):
    fields = SortedDict({
        'is_professional':
        forms.BooleanField(
            label=_(u"Professionnel"),
            required=False,
            initial=False,
            widget=CommentedCheckboxInput(info_text='Je suis professionnel')),
        'company_name':
        forms.CharField(label=_(u"Nom de la société"),
                        required=False,
                        max_length=255,
                        widget=forms.TextInput(attrs={'class': 'inm'})),
        'username':
        forms.RegexField(
            label=_(u"Pseudo"),
            max_length=30,
            regex=r'^[\w.@+-]+$',
            help_text=
            _("Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only."
              ),
            error_messages={
                'invalid':
                _("This value may contain only letters, numbers and @/./+/-/_ characters."
                  )
            },
            widget=forms.TextInput(attrs={'class': 'inm'})),
        'password1':
        forms.CharField(label=_(u"Mot de passe"),
                        max_length=128,
                        required=True,
                        widget=forms.PasswordInput(attrs={'class': 'inm'})),
        'password2':
        forms.CharField(label=_(u"Mot de passe à nouveau"),
                        max_length=128,
                        required=True,
                        widget=forms.PasswordInput(attrs={'class': 'inm'})),
        'first_name':
        forms.CharField(label=_(u"Prénom"),
                        max_length=30,
                        required=True,
                        widget=forms.TextInput(attrs={'class': 'inm'})),
        'last_name':
        forms.CharField(label=_(u"Nom"),
                        max_length=30,
                        required=True,
                        widget=forms.TextInput(attrs={'class': 'inm'})),
        'addresses__address1':
        forms.CharField(label=_(u"Rue"),
                        max_length=255,
                        widget=forms.Textarea(attrs={'class': 'inm street'})),
        'addresses__zipcode':
        forms.CharField(
            label=_(u"Code postal"),
            required=True,
            max_length=9,
            widget=forms.TextInput(attrs={'class': 'inm zipcode'})),
        'addresses__city':
        forms.CharField(label=_(u"Ville"),
                        required=True,
                        max_length=255,
                        widget=forms.TextInput(attrs={'class': 'inm town'})),
        'addresses__country':
        forms.ChoiceField(label=_(u"Pays"),
                          choices=COUNTRY_CHOICES,
                          required=True,
                          widget=forms.Select(attrs={'class': 'selm'})),
        'avatar':
        forms.ImageField(required=False, label=_(u"Photo de profil")),
        'phones__phone':
        PhoneNumberField(label=_(u"Téléphone"),
                         required=True,
                         widget=forms.TextInput(attrs={'class': 'inm'})),
        'drivers_license_number':
        forms.CharField(label=_(u'Numéro de permis'), max_length=32),
        'drivers_license_date':
        DateSelectField(label=_(u'Date de délivraisance')),
        'date_of_birth':
        DateSelectField(label=_(u'Date de naissance')),
        'place_of_birth':
        forms.CharField(label=_(u'Lieu de naissance'), max_length=255),
        'cvv':
        forms.CharField(
            max_length=4,
            label=_(u'Cryptogramme de sécurité'),
            help_text=_(u'Les 3 derniers chiffres au dos de la carte.')),
        'expires':
        ExpirationField(label=_(u'Date d\'expiration')),
        'holder_name':
        forms.CharField(label=_(u'Titulaire de la carte')),
        'card_number':
        CreditCardField(label=_(u'Numéro de carte de crédit')),
        'godfather_email':
        forms.EmailField(
            label=_(u'Email de votre parrain'),
            required=False,
            help_text=
            _(u'Commissions offertes pendant 3 mois si vous êtes parrainé par membre e-loue. Offre valable entre le 18 avril et le 30 avril 2013.'
              )),
    })

    # Are we in presence of a pro ?
    if fields.has_key('is_professional'):
        if instance and getattr(instance, 'is_professional', None) != None:
            del fields['is_professional']
            del fields['company_name']

    # Do we have an address ?
    if instance and instance.addresses.exists():
        fields['addresses'] = forms.ModelChoiceField(
            label=_(u"Adresse"),
            required=False,
            queryset=instance.addresses.all(),
            initial=instance.default_address
            if instance.default_address else instance.addresses.all()[0],
            widget=forms.Select(attrs={'class': 'selm'}),
            help_text=_(u"Selectionnez une adresse enregistrée précédemment"))
        for f in fields.keys():
            if "addresses" in f:
                fields[f].required = False

    # Do we have a phone number ?
    if instance and instance.phones.exists():
        fields['phones'] = forms.ModelChoiceField(
            label=_(u"Téléphone"),
            required=False,
            queryset=instance.phones.all(),
            initial=instance.phones.all()[0],
            widget=forms.Select(attrs={'class': 'selm'}),
            help_text=_(
                u"Selectionnez un numéro de téléphone enregistré précédemment")
        )
        if fields.has_key('phones__phone'):
            fields['phones__phone'].required = False

    # Do we have a password ?
    if fields.has_key('password1'):
        if instance and getattr(instance, 'password', None):
            del fields['password1']
            del fields['password2']

    if instance and instance.username and "first_name" not in required_fields:
        del fields['avatar']

    if instance:
        try:
            if instance.creditcard:
                del fields['cvv']
                del fields['expires']
                del fields['holder_name']
                del fields['card_number']
        except CreditCard.DoesNotExist:
            pass

    for f in fields.keys():
        if required_fields and f not in required_fields:
            del fields[f]
            continue
        if "__" in f or f in ["addresses", "phones", "password"]:
            continue
        if hasattr(instance, f) and getattr(instance, f):
            del fields[f]

    def save(self):
        for attr, value in self.cleaned_data.iteritems():
            if attr == "password1":
                self.instance.set_password(value)
            if "addresses" not in attr and "phones" not in attr:  # wtf is this checking?
                setattr(self.instance, attr, value)
        if 'addresses' in self.cleaned_data and self.cleaned_data['addresses']:
            address = self.cleaned_data['addresses']
        elif 'addresses__address1' in self.cleaned_data:
            address = self.instance.addresses.create(
                address1=self.cleaned_data['addresses__address1'],
                zipcode=self.cleaned_data['addresses__zipcode'],
                city=self.cleaned_data['addresses__city'],
                country=self.cleaned_data['addresses__country'])
            self.instance.default_address = address
        else:
            address = None
        if 'phones' in self.cleaned_data and self.cleaned_data['phones']:
            phone = self.cleaned_data['phones']
        elif 'phones__phone' in self.cleaned_data:
            phone = self.instance.phones.create(
                number=self.cleaned_data['phones__phone'])
        else:
            phone = None
        if self.cleaned_data.get('card_number'):
            pm = PayboxManager()
            subscriber_reference = uuid.uuid4().hex
            self.cleaned_data['card_number'] = pm.subscribe(
                subscriber_reference, self.cleaned_data['card_number'],
                self.cleaned_data['expires'], self.cleaned_data['cvv'])
            credit_card = CreditCard.objects.create(
                subscriber_reference=subscriber_reference,
                masked_number=self.cleaned_data['masked_number'],
                card_number=self.cleaned_data['card_number'],
                holder_name=self.cleaned_data['holder_name'],
                expires=self.cleaned_data['expires'],
                holder=self.instance,
                keep=True)
        else:
            credit_card = None

        self.instance.save()
        return self.instance, address, phone, credit_card

    def clean_username(self):
        if Patron.objects.filter(
                username=self.cleaned_data['username']).exists():
            raise forms.ValidationError(
                _(u"Ce nom d'utilisateur est déjà pris."))
        if Patron.objects.filter(
                slug=slugify(self.cleaned_data['username'])).exists():
            raise forms.ValidationError(
                _(u"Ce nom d'utilisateur est déjà pris."))
        return self.cleaned_data['username']

    def clean_addresses(self):
        addresses = self.cleaned_data['addresses']
        address1 = self.cleaned_data['addresses__address1']
        zipcode = self.cleaned_data['addresses__zipcode']
        city = self.cleaned_data['addresses__city']
        country = self.cleaned_data['addresses__country']

        if not addresses and not (address1 and zipcode and city and country):
            raise forms.ValidationError(_(u"Vous devez spécifiez une adresse"))
        return self.cleaned_data['addresses']

    def clean_company_name(self):
        is_professional = self.cleaned_data.get('is_professional')
        company_name = self.cleaned_data.get('company_name', None)
        if is_professional and not company_name:
            raise forms.ValidationError(
                _(u"Vous devez entrer le nom de votre société"))
        return company_name

    def clean_phones(self):
        phones = self.cleaned_data['phones']
        phone = self.cleaned_data['phones__phone']

        if not phones and not phone:
            raise forms.ValidationError(
                _(u"Vous devez spécifiez un numéro de téléphone"))
        return phones

    def clean(self):
        if self.errors:
            return self.cleaned_data

        if self.cleaned_data.get('card_number'):
            try:
                pm = PayboxManager()
                self.cleaned_data['masked_number'] = mask_card_number(
                    self.cleaned_data['card_number'])
                pm.authorize(self.cleaned_data['card_number'],
                             self.cleaned_data['expires'],
                             self.cleaned_data['cvv'], 1, 'verification')
            except PayboxException:
                raise forms.ValidationError(
                    _(u'La validation de votre carte a échoué.'))

        # testing passwords against each other:
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')

        if password1 != password2:
            msg = _(u"Vos mots de passe ne correspondent pas")
            self._errors['password1'] = [msg]
            self._errors['password2'] = [msg]
            del self.cleaned_data['password1']
            del self.cleaned_data['password2']

        return self.cleaned_data

    class Meta:
        fieldsets = [
            ('member', {
                'fields': [
                    'is_professional', 'company_name', 'username', 'password1',
                    'password2', 'first_name', 'last_name', 'avatar',
                    'godfather_email', 'date_of_birth', 'place_of_birth'
                ],
                'legend': 'Vous'
            }),
            ('driver_info', {
                'fields': ['drivers_license_number', 'drivers_license_date'],
                'legend': _(u'Permis de conduire')
            }),
            ('contacts', {
                'fields': [
                    'addresses', 'addresses__address1', 'addresses__zipcode',
                    'addresses__city', 'addresses__country', 'phones',
                    'phones__phone'
                ],
                'legend': 'Vos coordonnées'
            }),
            ('payment', {
                'fields': [
                    'cvv',
                    'expires',
                    'holder_name',
                    'card_number',
                ],
                'legend': 'Vos coordonnées bancaires'
            }),
        ]

    class_dict = fields.copy()
    class_dict.update({'instance': instance, 'Meta': Meta})
    form_class = type('MissingInformationForm', (BetterForm, ), class_dict)
    form_class.save = types.MethodType(save, None, form_class)
    form_class.clean = types.MethodType(clean, None, form_class)
    form_class.clean_username = types.MethodType(clean_username, None,
                                                 form_class)
    form_class.clean_phones = types.MethodType(clean_phones, None, form_class)
    form_class.clean_addresses = types.MethodType(clean_addresses, None,
                                                  form_class)
    form_class.clean_company_name = types.MethodType(clean_company_name, None,
                                                     form_class)
    return fields != {}, form_class
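
A minimal caller sketch, assuming the enclosing factory is exposed as make_missing_information_form(patron) and runs inside a view; the factory name and the patron/request variables are illustrative, not part of the original snippet:

# hypothetical view-side usage of the dynamically built form
missing, form_class = make_missing_information_form(patron)
if missing:
    form = form_class(request.POST or None)
    if form.is_valid():
        # save() returns the tuple built at the end of the factory above
        patron, address, phone, credit_card = form.save()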
Ejemplo n.º 48
0
    def handle(self, *app_labels, **options):
        from django.db.models import get_app, get_apps, get_models, get_model

        format = options.get('format', 'json')
        indent = options.get('indent', None)
        exclude = options.get('exclude', [])
        show_traceback = options.get('traceback', False)

        excluded_apps = [get_app(app_label) for app_label in exclude]

        if len(app_labels) == 0:
            app_list = SortedDict([(app, None) for app in get_apps() if app not in excluded_apps])
        else:
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)

                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))

                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    app_list[app] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        objects = []
        for app, model_list in app_list.items():
            if model_list is None:
                model_list = get_models(app)

            for model in model_list:
                objects.extend(model.objects.all())

        try:
            return serializers.serialize(format, objects, indent=indent)
        except Exception, e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
Ejemplo n.º 49
0
class CachedFilesMixin(object):
    patterns = (("*.css", (
        br"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""",
        br"""(@import\s*["']\s*(.*?)["'])""",
    )), )
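    # the two regexes above match url(...) references and @import "..." rules
    # inside *.css files so that their targets can be rewritten to hashed names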

    def __init__(self, *args, **kwargs):
        super(CachedFilesMixin, self).__init__(*args, **kwargs)
        try:
            self.cache = get_cache('staticfiles')
        except InvalidCacheBackendError:
            # Use the default backend
            self.cache = default_cache
        self._patterns = SortedDict()
        for extension, patterns in self.patterns:
            for pattern in patterns:
                compiled = re.compile(pattern)
                self._patterns.setdefault(extension, []).append(compiled)

    def file_hash(self, name, content=None):
        """
        Returns a hash of the file with the given name and optional content.
        """
        if content is None:
            return None
        md5 = hashlib.md5()
        for chunk in content.chunks():
            md5.update(chunk)
        return md5.hexdigest()[:12]

    def hashed_name(self, name, content=None):
        parsed_name = urlsplit(unquote(name))
        clean_name = parsed_name.path.strip()
        if content is None:
            if not self.exists(clean_name):
                raise ValueError("The file '%s' could not be found with %r." %
                                 (clean_name, self))
            try:
                content = self.open(clean_name)
            except IOError:
                # Handle directory paths and fragments
                return name
        path, filename = os.path.split(clean_name)
        root, ext = os.path.splitext(filename)
        file_hash = self.file_hash(clean_name, content)
        if file_hash is not None:
            file_hash = ".%s" % file_hash
        hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext))
        unparsed_name = list(parsed_name)
        unparsed_name[2] = hashed_name
        # Special casing for a @font-face hack, like url(myfont.eot?#iefix)
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        if '?#' in name and not unparsed_name[3]:
            unparsed_name[2] += '?'
        return urlunsplit(unparsed_name)

    def cache_key(self, name):
        return 'staticfiles:%s' % hashlib.md5(smart_str(name)).hexdigest()

    def url(self, name, force=False):
        """
        Returns the real URL in DEBUG mode.
        """
        if settings.DEBUG and not force:
            hashed_name, fragment = name, ''
        else:
            clean_name, fragment = urldefrag(name)
            if urlsplit(clean_name).path.endswith('/'):  # don't hash paths
                hashed_name = name
            else:
                cache_key = self.cache_key(name)
                hashed_name = self.cache.get(cache_key)
                if hashed_name is None:
                    hashed_name = self.hashed_name(clean_name).replace(
                        '\\', '/')
                    # set the cache if there was a miss
                    # (e.g. if cache server goes down)
                    self.cache.set(cache_key, hashed_name)

        final_url = super(CachedFilesMixin, self).url(hashed_name)

        # Special casing for a @font-face hack, like url(myfont.eot?#iefix)
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        query_fragment = '?#' in name  # [sic!]
        if fragment or query_fragment:
            urlparts = list(urlsplit(final_url))
            if fragment and not urlparts[4]:
                urlparts[4] = fragment
            if query_fragment and not urlparts[3]:
                urlparts[2] += '?'
            final_url = urlunsplit(urlparts)

        return unquote(final_url)

    def url_converter(self, name):
        """
        Returns the custom URL converter for the given file name.
        """
        def converter(matchobj):
            """
            Converts the matched URL depending on the parent level (`..`)
            and returns the normalized and hashed URL using the url method
            of the storage.
            """
            matched, url = matchobj.groups()
            # Completely ignore http(s) prefixed URLs,
            # fragments and data-uri URLs
            if url.startswith(('#', 'http:', 'https:', 'data:')):
                return matched
            name_parts = name.split(os.sep)
            # Using posix normpath here to remove duplicates
            url = posixpath.normpath(url)
            url_parts = url.split('/')
            parent_level, sub_level = url.count('..'), url.count('/')
            if url.startswith('/'):
                sub_level -= 1
                url_parts = url_parts[1:]
            if parent_level or not url.startswith('/'):
                start, end = parent_level + 1, parent_level
            else:
                if sub_level:
                    if sub_level == 1:
                        parent_level -= 1
                    start, end = parent_level, 1
                else:
                    start, end = 1, sub_level - 1
            joined_result = '/'.join(name_parts[:-start] + url_parts[end:])
            hashed_url = self.url(unquote(joined_result), force=True)
            file_name = hashed_url.split('/')[-1:]
            relative_url = '/'.join(url.split('/')[:-1] + file_name)

            # Return the hashed version to the file
            return 'url("%s")' % unquote(relative_url)

        return converter

    def post_process(self, paths, dry_run=False, **options):
        """
        Post process the given list of files (called from collectstatic).

        Processing is actually two separate operations:

        1. renaming files to include a hash of their content for cache-busting,
           and copying those files to the target storage.
        2. adjusting files which contain references to other files so they
           refer to the cache-busting filenames.

        If either of these are performed on a file, then that file is considered
        post-processed.
        """
        # don't even dare to process the files if we're in dry run mode
        if dry_run:
            return

        # where to store the new paths
        hashed_paths = {}

        # build a list of adjustable files
        matches = lambda path: matches_patterns(path, self._patterns.keys())
        adjustable_paths = [path for path in paths if matches(path)]

        # then sort the files by the directory level
        path_level = lambda name: len(name.split(os.sep))
        for name in sorted(paths.keys(), key=path_level, reverse=True):

            # use the original, local file, not the copied-but-unprocessed
            # file, which might be somewhere far away, like S3
            storage, path = paths[name]
            with storage.open(path) as original_file:

                # generate the hash with the original content, even for
                # adjustable files.
                hashed_name = self.hashed_name(name, original_file)

                # then get the original's file content..
                if hasattr(original_file, 'seek'):
                    original_file.seek(0)

                hashed_file_exists = self.exists(hashed_name)
                processed = False

                # ..to apply each replacement pattern to the content
                if name in adjustable_paths:
                    content = original_file.read()
                    converter = self.url_converter(name)
                    for patterns in self._patterns.values():
                        for pattern in patterns:
                            content = pattern.sub(converter, content)
                    if hashed_file_exists:
                        self.delete(hashed_name)
                    # then save the processed result
                    content_file = ContentFile(smart_str(content))
                    saved_name = self._save(hashed_name, content_file)
                    hashed_name = force_unicode(saved_name.replace('\\', '/'))
                    processed = True
                else:
                    # or handle the case in which neither processing nor
                    # a change to the original file happened
                    if not hashed_file_exists:
                        processed = True
                        saved_name = self._save(hashed_name, original_file)
                        hashed_name = force_unicode(
                            saved_name.replace('\\', '/'))

                # and then set the cache accordingly
                hashed_paths[self.cache_key(name)] = hashed_name
                yield name, hashed_name, processed

        # Finally set the cache
        self.cache.set_many(hashed_paths)
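
For context, the mixin above is meant to be combined with a concrete storage backend; the sketch below mirrors the CachedStaticFilesStorage pairing shipped with Django of this era (the SortedDict, smart_str and force_unicode imports point to 1.4-ish), while the settings module path is an assumption for your own project:

# minimal sketch: pair the mixin with the default filesystem storage
from django.contrib.staticfiles.storage import StaticFilesStorage


class CachedStaticFilesStorage(CachedFilesMixin, StaticFilesStorage):
    """StaticFilesStorage whose url() hands back hashed, cache-busting names."""
    pass

# settings.py (assumed layout: the class above lives in myproject/storage.py)
# STATICFILES_STORAGE = 'myproject.storage.CachedStaticFilesStorage'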