Example 1
 def column_search(self, queryset):
     '''Filter a queryset with column search'''
     for idx, search in self.column_set:
         if hasattr(self, 'search_col_%s' % idx):
             custom_search = getattr(self, 'search_col_%s' % idx)
             queryset = custom_search(search, queryset)
         else:
             field = self.get_field(idx)
             fields = RE_FORMATTED.findall(field) if RE_FORMATTED.match(field) else [field]
             if self.dt_data['bRegex_%s' % idx]:
                 criterions = [
                     Q(**{'%s__iregex' % field: search}) for field in fields
                     if self.can_regex(field)
                 ]
                 if len(criterions) > 0:
                     search = reduce(or_, criterions)
                     queryset = queryset.filter(search)
             else:
                 for term in search.split():
                     criterions = (Q(**{'%s__icontains' % field: term})
                                   for field in fields)
                     search = reduce(or_, criterions)
                     queryset = queryset.filter(search)
     return queryset
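
Nearly every snippet on this page repeats the same idiom: build a list of Q objects, fold them into a single OR'ed predicate with reduce, and call filter() once. A minimal sketch of that idiom, assuming a hypothetical queryset, field list, and helper name:

    from functools import reduce
    from operator import or_

    from django.db.models import Q

    def icontains_any(queryset, fields, term):
        """OR one icontains lookup per field, then filter in a single query."""
        criterions = [Q(**{'%s__icontains' % field: term}) for field in fields]
        if not criterions:
            return queryset
        return queryset.filter(reduce(or_, criterions))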
Example 3
    def filter_queryset(self, request, queryset, view):
        search_fields = getattr(view, 'word_fields', None)

        if not search_fields:
            return queryset

        orm_lookups = [
            self.construct_search(str(search_field))
            for search_field in search_fields
        ]

        search_term = request.query_params.get(self.search_param, '').split()

        if not search_term:
            return queryset

        lookup_list = list()
        for orm_lookup in orm_lookups:
            and_query = list()
            for term in search_term:
                if term.startswith('-') or term.endswith('-'):
                    term = term.replace('-', '')
                and_query.append(
                    reduce(operator.or_, [
                        models.Q(**{lookup: prep_term})
                        for lookup, prep_term in zip(orm_lookup,
                                                     self.construct_term(term))
                    ]))
            lookup_list.append(reduce(operator.and_, and_query))
        queryset = queryset.filter(reduce(operator.or_, lookup_list))
        return queryset
Example 4
 def column_search(self, queryset):
     '''Filter a queryset with column search'''
     for idx in xrange(self.dt_data['iColumns']):
         search = self.dt_data['sSearch_%s' % idx]
         if search:
             if hasattr(self, 'search_col_%s' % idx):
                 custom_search = getattr(self, 'search_col_%s' % idx)
                 queryset = custom_search(search, queryset)
             else:
                 field = self.get_field(idx)
                 fields = RE_FORMATTED.findall(field) if RE_FORMATTED.match(field) else [field]
                 if self.dt_data['bRegex_%s' % idx]:
                     criterions = [
                         Q(**{'%s__iregex' % field: search})
                         for field in fields
                         if self.can_regex(field) and self.is_searchable(field)
                     ]
                     if len(criterions) > 0:
                         search = reduce(or_, criterions)
                         queryset = queryset.filter(search)
                 else:
                     for term in search.split():
                         criterions = [
                             Q(**{'%s__icontains' % field: term})
                             for field in fields
                             if self.is_searchable(field)
                         ]
                         search = reduce(or_, criterions)
                         queryset = queryset.filter(search)
     return queryset
Example 5
 def global_search(self, queryset):
     '''Filter a queryset with global search'''
     search = self.dt_data['sSearch']
     if search:
         if self.dt_data['bRegex']:
             criterions = [
                 Q(**{'%s__iregex' % field: search})
                 for field in self.get_db_fields()
                 if self.can_regex(field) and
                 self.is_searchable(field)
             ]
             if len(criterions) > 0:
                 search = reduce(or_, criterions)
                 queryset = queryset.filter(search)
         else:
             for term in search.split():
                 criterions = [
                     Q(**{'%s__icontains' % field: term})
                     for field in self.get_db_fields()
                     if self.is_searchable(field)
                 ]
                 if len(criterions) > 0:
                     search = reduce(or_, criterions)
                     queryset = queryset.filter(search)
     return queryset
Example 6
    def filter_queryset(self, request, queryset, view):
        search_fields = getattr(view, 'search_fields', None)
        search_terms_result = self.get_search_terms(request)
        search_terms = search_terms_result['terms']
        search_exclude = None
        if search_terms_result.get('exclude'):
            search_exclude = search_terms_result['exclude']

        if not search_fields and not search_terms:
            return queryset

        orm_lookups = [
            self.construct_search(six.text_type(search_field))
            for search_field in search_fields
        ]

        base = queryset
        conditions = []
        if len(search_terms):
            for search_term in search_terms:
                queries = [
                    models.Q(**{orm_lookup: search_term})
                    for orm_lookup in orm_lookups
                ]
                conditions.append(reduce(operator.or_, queries))
            
            queryset = queryset.filter(reduce(operator.and_, conditions))

        if self.must_call_distinct(queryset, search_fields):
            # Filtering against a many-to-many field requires us to
            # call queryset.distinct() in order to avoid duplicate items
            # in the resulting queryset.
            # We try to avoid this if possible, for performance reasons.
            queryset = distinct(queryset, base)
        if search_exclude:
            exclude_conditions = []
            if len(search_terms):
                for search_term in search_terms:
                    for excl in search_exclude:
                        queries = [
                            models.Q(**{orm_lookup: search_term + ' ' + excl})
                            for orm_lookup in orm_lookups
                        ]
                        exclude_conditions.append(reduce(operator.or_, queries))
            else:
                for excl in search_exclude:
                    queries = [
                        models.Q(**{orm_lookup: excl})
                        for orm_lookup in orm_lookups
                    ]
                    exclude_conditions.append(reduce(operator.or_, queries))
            queryset = queryset.exclude(reduce(operator.and_, exclude_conditions))
        return queryset
Example 7
 def _from_file(self, fileobj, tmpdir):
     if zipfile.is_zipfile(fileobj):
         with zipfile.ZipFile(fileobj) as zf:
             extracted = []
             for item in zf.infolist():
                 fname = os.path.abspath(os.path.join(tmpdir, item.filename))
                 if fname.startswith(tmpdir):
                     zf.extract(item, tmpdir)
                     extracted.append(fname)
             for path in extracted:
                 if path.endswith('.shp'):
                     fname = path
     else:
         # NOTE: is_zipfile() seeks to end of file or at least 110 bytes.
         fileobj.seek(0)
         with tempfile.NamedTemporaryFile(dir=tmpdir, delete=False) as fp:
             shutil.copyfileobj(fileobj, fp)
         fname = fp.name
     # Attempt to union all geometries from GDAL data source.
     try:
         geoms = gdal.DataSource(fname)[0].get_geoms()
         geom = reduce(lambda g1, g2: g1.union(g2), geoms)
         if not geom.srs:
             raise gdal.GDALException('Cannot determine SRS')
     except (gdal.GDALException, IndexError):
         raise forms.ValidationError(
             GeometryField.default_error_messages['invalid_geom'],
             code='invalid_geom')
     return geom
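
The geometry fold above works because union() is associative, so reduce simply applies it pairwise down the list. The same shape of fold works for any object exposing a union() method; a toy sketch with plain sets standing in for GDAL geometries:

    from functools import reduce

    geoms = [{1, 2}, {2, 3}, {5}]
    merged = reduce(lambda g1, g2: g1.union(g2), geoms)
    assert merged == {1, 2, 3, 5}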
Example 8
def matches(request):
    user = User.objects.get(username=request.user.username)
    profile = Profile.objects.get(user=user)
    all_matches = MatchesTable.objects.filter(from_user=user)

    if request.method == 'POST':
        seconduser = User.objects.get(username=request.POST['r_id'])
        if 'Delete Match' in request.POST:
            t = all_matches.filter(to_user=seconduser).values_list('id',
                                                                   flat=True)
            match = MatchesTable.objects.get(id=t[0])
            match.like = False
            match.save()
            return HttpResponseRedirect(reverse('matches'))

    match_filter = [Q()]
    for match in all_matches:
        if match_exists(user, match.to_user):
            match_filter.append(Q(to_user=match.to_user))

    if len(match_filter) > 1:
        valid_matches = all_matches.filter(reduce(operator.ior, match_filter))
    else:
        valid_matches = MatchesTable.objects.none()

    return render(request, 'matches.html', {
        'matches_list': valid_matches,
    })
Example 9
    def get_prefetch_queryset(self, instances, queryset=None):
        models = set([type(i) for i in instances])

        # Handle case where instances are different models (and consequently,
        # different content types)
        if len(models) > 1:
            bulk_qsets = []
            for model, group in itertools.groupby(instances, type):
                model_instances = list(group)
                field = getattr(model, self.name)
                bulk_qsets.append(field.bulk_related_objects(model_instances))
            bulk_qset = reduce(operator.or_, bulk_qsets)

            def rel_obj_attr(rel_obj):
                content_type = getattr(rel_obj, "%s_id" % self.content_type_field_name)
                object_id = getattr(rel_obj, self.object_id_field_name)
                return (content_type, object_id)

            def get_ctype_obj_id(obj):
                field = getattr(obj.__class__, self.name)
                content_type = ContentType.objects.get_for_model(obj, field.for_concrete_model)
                return (content_type.pk, obj._get_pk_val())

            return (bulk_qset,
                rel_obj_attr,
                get_ctype_obj_id,
                True,
                self.attname) + (() if django.VERSION < (2, 0) else (True,))

        return (self.bulk_related_objects(instances),
            operator.attrgetter(self.object_id_field_name),
            lambda obj: obj._get_pk_val(),
            True,
            self.attname) + (() if django.VERSION < (2, 0) else (True,))
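
The reduce(operator.or_, bulk_qsets) call above leans on Django QuerySets supporting the | operator, which merges the filters into one combined query rather than running each queryset separately. A sketch of that fold (the model in the usage note is hypothetical):

    import operator
    from functools import reduce

    def union_querysets(querysets):
        """Fold querysets with |; Django combines the WHERE clauses lazily."""
        return reduce(operator.or_, querysets)

    # Usage (hypothetical model):
    #   qs = union_querysets([Book.objects.filter(pk=i) for i in (1, 2, 3)])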
Example 11
 def handle_merge(self, loader, conflicts):
     """
     Handles merging together conflicted migrations interactively,
     if it's safe; otherwise, advises on how to fix it.
     """
     if self.interactive:
         questioner = InteractiveMigrationQuestioner()
     else:
         questioner = MigrationQuestioner(defaults={"ask_merge": True})
     for app_label, migration_names in conflicts.items():
         # Grab out the migrations in question, and work out their
         # common ancestor.
         merge_migrations = []
         for migration_name in migration_names:
             migration = loader.get_migration(app_label, migration_name)
             migration.ancestry = loader.graph.forwards_plan((app_label, migration_name))
             merge_migrations.append(migration)
         common_ancestor = None
         for level in zip(*[m.ancestry for m in merge_migrations]):
             if reduce(operator.eq, level):
                 common_ancestor = level[0]
             else:
                 break
         if common_ancestor is None:
             raise ValueError("Could not find common ancestor of %s" % migration_names)
         # Now work out the operations along each divergent branch
         for migration in merge_migrations:
             migration.branch = migration.ancestry[(migration.ancestry.index(common_ancestor) + 1) :]
             migration.merged_operations = []
             for node_app, node_name in migration.branch:
                 migration.merged_operations.extend(loader.get_migration(node_app, node_name).operations)
         # In future, this could use some of the Optimizer code
         # (can_optimize_through) to automatically see if they're
         # mergeable. For now, we always just prompt the user.
         if self.verbosity > 0:
             self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label))
             for migration in merge_migrations:
                 self.stdout.write(self.style.MIGRATE_LABEL("  Branch %s" % migration.name))
                 for operation in migration.merged_operations:
                     self.stdout.write("    - %s\n" % operation.describe())
         if questioner.ask_merge(app_label):
             # If they still want to merge it, then write out an empty
             # file depending on the migrations needing merging.
             numbers = [MigrationAutodetector.parse_number(migration.name) for migration in merge_migrations]
             try:
                 biggest_number = max([x for x in numbers if x is not None])
             except ValueError:
                 biggest_number = 1
             subclass = type(
                 "Migration",
                 (Migration,),
                 {"dependencies": [(app_label, migration.name) for migration in merge_migrations]},
             )
             new_migration = subclass("%04i_merge" % (biggest_number + 1), app_label)
             writer = MigrationWriter(new_migration)
             with open(writer.path, "wb") as fh:
                 fh.write(writer.as_string())
             if self.verbosity > 0:
                 self.stdout.write("\nCreated new merge migration %s" % writer.path)
Example 12
    def filter_search(self, qs):
        kwargs = {}
        for key, value in self.sFilters.iteritems():
            sKey = key.split(':')

            filterList = True
            if not isinstance(value, list):
                value = [value]
                filterList = False

            for index, item in enumerate(value):
                if isinstance(item, str) or isinstance(item, unicode):
                    try:
                        value[index] = self.timezone.localize(
                            datetime.datetime.strptime(
                                item, '%m/%d/%Y %I:%M %p'
                            )
                        ).astimezone(
                            pytz.utc
                        ).replace(
                            tzinfo=None
                        )

                    except:
                        if item.isdigit():
                            value[index] = int(item)

            if len(sKey) > 1:  # range search
                if sKey[1] == 'from':
                    kwargs[sKey[0] + '__gte'] = value[0]
                elif sKey[1] == 'to':
                    kwargs[sKey[0] + '__lt'] = value[0]

            elif filterList:  # list search
                args = []
                for i in value:
                    args.append(Q(**{sKey[0]: i}))

                qs = qs.filter(reduce(operator.or_, args))

            elif isinstance(value[0], types.BooleanType) or isinstance(
                value[0], types.IntType
            ):  # boolean search
                if value[0] is True:
                    kwargs[sKey[0] + '__gt'] = 0

                else:
                    kwargs[sKey[0]] = 0

            else:  # text search
                if sKey[0].endswith('sha256'):
                    kwargs[sKey[0]] = hashlib.sha256(value[0]).hexdigest()
                else:
                    kwargs[sKey[0] + '__icontains'] = value[0]

        if len(kwargs) > 0:
            qs = qs.filter(**kwargs)

        return qs
Example 13
 def test_key(self):
     """Test using a custom `key` function."""
     iterables = [range(5, 0, -1), range(4, 0, -1)]
     self.assertEqual(
         list(
             sorted(reduce(list.__add__, [list(it) for it in iterables]),
                    reverse=True)),
         list(collate(*iterables, key=lambda x: -x)))
Example 14
def append_lookup_keys(model, fields):
    new_fields = []
    for field in fields:
        try:
            new_field = append_lookup_key(model, field)
        except AttributeError:
            new_field = (field, )
        new_fields.append(new_field)

    return moves.reduce(set.union, new_fields, set())
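
append_lookup_keys passes set() as the initializer, so the fold is safe when fields is empty, and set.union merges each tuple into the accumulator. set().union(*iterables) is an equivalent one-step spelling (equivalence sketch):

    from functools import reduce

    new_fields = [('name',), ('author', 'author__name')]
    merged = reduce(set.union, new_fields, set())
    assert merged == set().union(*new_fields) == {'name', 'author', 'author__name'}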
Example 15
 def global_search(self, queryset):
     '''Filter a queryset with global search'''
     search = self.dt_data['sSearch']
     if search:
         if self.dt_data['bRegex']:
             criterions = [
                 Q(**{'%s__iregex' % field: search})
                 for field in self.get_db_fields() if self.can_regex(field)
             ]
             if len(criterions) > 0:
                 search = reduce(or_, criterions)
                 queryset = queryset.filter(search)
         else:
             for term in search.split():
                 criterions = (Q(**{'%s__icontains' % field: term})
                               for field in self.get_db_fields())
                 search = reduce(or_, criterions)
                 queryset = queryset.filter(search)
     return queryset
Example 16
 def global_search(self, queryset):
     '''Filter a queryset with global search'''
     search = self.dt_data['sSearch']
     if search:
         if self.dt_data['bRegex']:
             criterions = (Q(**{'%s__iregex' % field: search}) for field in self.get_db_fields())
             search = reduce(or_, criterions)
             queryset = queryset.filter(search)
         else:
             ors = []
             for comma_split in search.split(','):
                 ands = []
                 for term in comma_split.split():
                     criterions = (Q(**{'%s__icontains' % field: term}) for field in self.get_db_fields())
                     single_term = reduce(Q.__or__, criterions)
                     ands.append(single_term)
                 search = reduce(Q.__and__, ands)
                 ors.append(search)
             search = reduce(Q.__or__, ors)
             queryset = queryset.filter(search)
     return queryset
Example 17
 def option(self, name, *args):
     """Send the OPTION command."""
     if self.__connected:
         self.__send('OPTION %s%s' %
                     (name, reduce(lambda x, y: str(x) + ' ' + str(y),
                                   args, '')))
         res = self.__read()
         code, msg = res[0].split(' ', 1)
         if int(code) == 250:
             if name.lower() == 'mime':
                 self.mime = True
             return True
         return False
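
The reduce in option() only concatenates the arguments with spaces (the leading space is deliberate: it separates the command name from its first argument). When the goal is plain joining, str.join is the usual idiom; an equivalence sketch:

    from functools import reduce

    args = ('UTF-8', 1)
    folded = reduce(lambda x, y: str(x) + ' ' + str(y), args, '')
    joined = ''.join(' ' + str(a) for a in args)
    assert folded == joined == ' UTF-8 1'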
Example 18
def create_image(multiband=False):
    tmpname = os.path.basename(tempfile.mktemp(prefix='tmin_', suffix='.tif'))
    fp = default_storage.open(tmpname, 'w+b')
    shape = (5, 5)
    if multiband:
        shape += (3, )
    b = bytearray(range(reduce(operator.mul, shape)))
    ras = raster.frombytes(bytes(b), shape)
    ras.affine = (-120, 2, 0, 38, 0, -2)
    ras.sref = 4326
    ras.save(fp)
    ras.close()
    fp.seek(0)
    return fp
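
Here reduce(operator.mul, shape) computes the pixel count of the raster. On Python 3.8+ the standard library offers math.prod for the same multiplicative fold:

    import math
    import operator
    from functools import reduce

    shape = (5, 5, 3)
    assert reduce(operator.mul, shape) == math.prod(shape) == 75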
Example 19
    def column_search(self, queryset):
        '''Filter a queryset with column search'''

        i = 0
        while i < len(self.dt_data):

            if self.dt_data.get('columns[%s][search][value]' % i, None) is not None:

                search = self.dt_data['columns[%s][search][value]' % i]
                if search:

                    if hasattr(self, 'search_col_%s' % i):
                        custom_search = getattr(self, 'search_col_%s' % i)
                        queryset = custom_search(search, queryset)

                    else:
                        fieldT = self.get_field(i)
                        fields = RE_FORMATTED.findall(fieldT) if RE_FORMATTED.match(fieldT) else [fieldT]
                        if self.dt_data['columns[%s][search][regex]' % i]:

                            criterions = [Q(**{'%s__iregex' % field: search}) for field in fields if self.can_regex(field)]
                            if len(criterions) > 0:
                                search = reduce(or_, criterions)
                                queryset = queryset.filter(search)

                        else:
                            for term in search.split():
                                criterions = (Q(**{'%s__icontains' % field: term}) for field in fields)
                                search = reduce(or_, criterions)
                                queryset = queryset.filter(search)

            else:
                break

            i += 1

        return queryset
Example 20
 def global_search(self, queryset):
     '''Filter a queryset with global search'''
     search = self.dt_data['sSearch']
     if search:
         search_fields = list()
         for k,v in self.dt_data.items():
             if k.startswith("bSearchable_") and v == True:
                 index = int(k.split("bSearchable_")[1])
                 search_fields.append(self.get_field(index))
         if self.dt_data['bRegex']:
             criterions = [
                 Q(**{'%s__iregex' % field: search})
                 for field in search_fields
                 if self.can_regex(field)
             ]
             if len(criterions) > 0:
                 search = reduce(or_, criterions)
                 queryset = queryset.filter(search)
         else:
             for term in search.split():
                 criterions = (Q(**{'%s__icontains' % field: term}) for field in search_fields)
                 search = reduce(or_, criterions)
                 queryset = queryset.filter(search)
     return queryset
Example 21
 def generate_query(self):
     """ Reduces multiple queries into a single usable query """
     query = Q()
     ORed = []
     for form in self._non_deleted_forms:
         if not hasattr(form, 'cleaned_data'):
             continue
         if form.cleaned_data['field'] == "_OR":
             ORed.append(query)
             query = Q()
         else:
             query = query & form.make_query()
     if ORed:
         if query:  # add last query for OR if any
             ORed.append(query)
         query = reduce(operator.or_, ORed)
     return query
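
generate_query only calls reduce when ORed is non-empty, because reduce raises TypeError on an empty sequence when no initializer is given. Since an empty Q() is a neutral element for both & and |, passing it as the initializer removes the need for such guards; a sketch, not from the original source:

    import operator
    from functools import reduce

    from django.db.models import Q

    def any_of(criterions):
        # Safe on an empty iterable: Q() contributes no condition.
        return reduce(operator.or_, criterions, Q())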
Example 22
    def get_existing_queryset(self, batch):
        key = self.migration.primary_key
        model = self.migration.model
        manager = model._base_manager
        if not batch or key is None:
            return manager.none()

        if not isinstance(key, collections.Iterable):
            key_attr = key.attname
            return manager.filter(
                **
                {'{0}__in'.format(key_attr): (row[key_attr] for row in batch)})
        else:
            key_attrs = [k.attname for k in key]
            return manager.filter(
                reduce(operator.or_,
                       (Q(**{attr: row[attr]
                             for attr in key_attrs}) for row in batch)))
Example 23
    def _queryset_from_parameters(self):
        params = []
        stacked_params = Q()
        try:
            for parameter in self.parameters:
                params.append(parameter.filter_param & stacked_params)
                stacked_params &= parameter.eq_param
        except MissingBorderObjectException:
            return self._queryset.none()
        queryset = self._queryset._clone()
        queryset.query.clear_limits()
        if params:
            queryset = queryset.filter(reduce(operator.or_, params))
        ordering = self.get_ordering()
        qs = queryset.order_by(*ordering)
        # Remove redundant query where clauses

        # qs.query = reduce_redundant_clauses(qs.query)

        return qs
Example 24
    def search(self, *args, **kwargs):
        """
        Proxy to queryset's search method for the manager's model and any
        models that subclass from this manager's model if the model is
        abstract.
        """
        if getattr(self.model._meta, 'abstract', False):
            models = [
                m
                for m
                in apps.get_models()
                if issubclass(m, self.model)
            ]
            parents = reduce(ior, [
                set(m._meta.get_parent_list())
                for m in
                models
            ])
            # Strip out any models that are superclasses of models.
            models = [m for m in models if m not in parents]
        else:
            models = [self.model]

        kwargs['order_results'] = False
        kwargs['decorate_results'] = True
        user = kwargs.pop('user', None)
        customer = kwargs.pop('customer', None)
        results = []
        for model in models:
            qs = model._default_manager.get_queryset()
            if hasattr(qs, 'active'):
                qs = qs.active(user)
            if hasattr(qs, 'available'):
                qs = qs.available(user, customer)
            if hasattr(qs, 'enabled'):
                qs = qs.enabled(user)
            if hasattr(qs, 'published'):
                qs = qs.published(user)
            results.extend(qs.search(*args, **kwargs))

        return sorted(results, key=lambda r: r.search_score, reverse=True)
Example 25
    def get_boundaries(self, sets=None):
        r = {
            'boundaries_concordance': [],
            'boundaries_centroid': []
        }

        concordances = PostcodeConcordance.objects.filter(code=self.code).values_list('boundary', flat=True)

        if sets:
            concordances = [boundary for boundary in concordances if boundary.split('/')[0] in sets]

        concordance_sets = set()

        if concordances:
            q = ((models.Q(set=concordance.split('/')[0]) & models.Q(slug=concordance.split('/')[1])) for concordance in concordances)

            boundaries = Boundary.objects.filter(reduce(lambda a, b: a | b, q))
            boundaries = Boundary.prepare_queryset_for_get_dicts(boundaries)
            boundaries = Boundary.get_dicts(boundaries)

            r['boundaries_concordance'] = boundaries

            for boundary in boundaries:
                concordance_sets.add(boundary['related']['boundary_set_url'])

        if self.centroid:
            q = models.Q(shape__contains=self.centroid)

            if sets:
                q &= models.Q(set__in=sets)

            boundaries = Boundary.objects.filter(q)
            boundaries = Boundary.prepare_queryset_for_get_dicts(boundaries)
            boundaries = Boundary.get_dicts(boundaries)

            r['boundaries_centroid'] = [boundary for boundary in boundaries if boundary['related']['boundary_set_url'] not in concordance_sets]

        return r
Example 26
    def get_queryset(self):
        """
        Constructs an '__contains' or '__icontains' filter across all of the
        fields listed in ``SEARCH_FIELDS``.
        """
        qs = super(SearchForm, self).get_queryset()

        # Do Searching
        q = self.cleaned_data.get('q', '').strip()
        if q:
            args = []
            for field in self.SEARCH_FIELDS:
                if self.CASE_SENSITIVE:
                    kwarg = {field + '__contains': q}
                else:
                    kwarg = {field + '__icontains': q}
                args.append(Q(**kwarg))
            if len(args) > 1:
                qs = qs.filter(reduce(lambda x, y: x | y, args))
            elif len(args) == 1:
                qs = qs.filter(args[0])

        return qs
Example 28
def home(request):
    user = request.user
    profile = request.user.profile

    if (profile.first_login):
        profile.first_login = False
        profile.save()
        return redirect('update_profile', username=user)

    if request.method == 'POST':
        seconduser = User.objects.get(username=request.POST['r_id'])
        if 'accept' in request.POST:
            MatchesTable.objects.create(from_user=user,
                                        to_user=seconduser,
                                        like=True)
            seconduser.profile.rank += 1
            seconduser.profile.save()
        if 'reject' in request.POST:
            MatchesTable.objects.create(from_user=user,
                                        to_user=seconduser,
                                        like=False)

    course_filter = Q(profile__courses__in=profile.courses.all())
    all_matches = User.objects.filter(course_filter).distinct()

    match_filter = [~Q(username=request.user.username)]
    for match in all_matches:
        if card_processed(user, match):
            match_filter.append(~Q(username=match.username))
    new_match = all_matches.filter(reduce(operator.iand, match_filter))[0:1]
    finished = not new_match.exists()

    return render(request, 'home.html', {
        'user': user,
        'match': new_match,
        'finished': finished
    })
Example 29
 def get_all_choices(cls, *args, **kwargs):
     """
     Validate (template), de-duplicate (by template), sort (by label) and
     return a list of ``(template name, label)`` choices for all plugins.
     """
     plugins = cls.get_plugins(*args, **kwargs)
     all_choices = reduce(operator.add,
                          [plugin.choices for plugin in plugins])
     choices = []
     seen = set()
     for template, label in all_choices:
         # De-duplicate.
         if template in seen:
             continue
         seen.add(template)
         # Validate.
         try:
             validators.template_name(template)
         except ValidationError:
             continue
         choices.append((template, label))
     # Sort by template name, then label.
     choices = sorted(choices, key=lambda a: (a[0], a[1]))
     return choices
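
reduce(operator.add, ...) concatenates the per-plugin choice lists, but every step copies the accumulated list, making the fold quadratic in the total number of choices; itertools.chain.from_iterable visits the same elements in linear time. An equivalence sketch:

    import itertools
    import operator
    from functools import reduce

    per_plugin = [[('a.html', 'A')], [('b.html', 'B'), ('a.html', 'A2')]]
    flat = reduce(operator.add, per_plugin)
    assert flat == list(itertools.chain.from_iterable(per_plugin))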
Example 30
def index(request, **args):
    site_url = '%s://%s' % (request.is_secure() and 'https'
                            or 'http', request.get_host())
    page = {
        'ctx': args.get('ctx', ''),
        'backtime': True,
        'robots': 'index',
        'public': False,
        'site_url': site_url,
        'base_url': settings.BASE_URL,
        'login_url': settings.LOGIN_URL,
        'favicon': settings.FAVICON,
        'author_name': settings.FEED_AUTHOR_NAME,
        'author_uri': getattr(settings, 'FEED_AUTHOR_URI', False),
        'taguri': settings.FEED_TAGURI,
        'icon': settings.FEED_ICON,
        'maps_engine': settings.MAPS_ENGINE,
        'fb_app_id': settings.FACEBOOK_APP_ID,
        'pshb_hubs': settings.PSHB_HUBS,
    }
    authed = request.user.is_authenticated() and request.user.is_staff
    friend = request.user.is_authenticated() and not request.user.is_staff
    urlparams = []
    entries_on_page = settings.ENTRIES_ON_PAGE
    entries_orderby = 'date_published'

    # Entries filter.
    fs = {'active': True, 'service__home': True}

    # Filter by dates.
    year = int(args.get('year', 0))
    month = int(args.get('month', 0))
    day = int(args.get('day', 0))
    if year:
        fs[entries_orderby + '__year'] = year
    if month:
        fs[entries_orderby + '__month'] = month
    if day:
        fs[entries_orderby + '__day'] = day
    if year and month and day:
        dt = datetime.date(year, month, day).strftime('%Y/%m/%d')
    elif year and month:
        dt = datetime.date(year, month, 1)
        prev, next = pn_month_start(dt)
        page['month_nav'] = True
        page['month_prev'] = prev.strftime('%Y/%m')
        page['month_next'] = next.strftime('%Y/%m')
        dt = dt.strftime('%Y/%m')
    elif year:
        dt = datetime.date(year, 1, 1).strftime('%Y')

    if year:
        page['backtime'] = False
        page['title'] = dt
        page['subtitle'] = _('You are currently browsing the archive for %s'
                             ) % ('<b>' + dt + '</b>')
        page['robots'] = 'noindex'

    if page['backtime']:
        entries = Entry.objects.order_by('-' + entries_orderby)
    else:
        entries = Entry.objects.order_by(entries_orderby)

    if not authed:
        fs['draft'] = False
    if not authed or page['ctx'] == 'public':
        fs['service__public'] = True
        page['public'] = True

    # Filter for favorites.
    if page['ctx'] == 'favorites':
        if not authed:
            return HttpResponseRedirect(settings.BASE_URL + '/')
        favs = Favorite.objects.filter(user__id=request.user.id)
        page['favorites'] = True
        page['title'] = _('Favorites')
        page['subtitle'] = _(
            'You are currently browsing your favorite entries')
        fs['id__in'] = favs.values('entry')

    # Filter lists.
    elif 'list' in args:
        try:
            services = List.objects.get(user__id=request.user.id,
                                        slug=args['list']).services
            del fs['service__home']
            fs['service__id__in'] = services.values('id')
            page['ctx'] = 'list/' + args['list']
            page['title'] = args['list']
            page['subtitle'] = _(
                'You are currently browsing entries from %s list only.') % (
                    '<b>' + args['list'] + '</b>')
        except List.DoesNotExist:
            if authed:
                raise Http404

    # Filter for exactly one given entry.
    elif 'entry' in args:
        fs['id__exact'] = int(args['entry'])
        page['exactentry'] = True
        if authed and 'service__public' in fs:
            del fs['service__public']

    if not authed:
        page['ctx'] = ''

    # Filter by class type.
    cls = request.GET.get('class', 'all')
    if cls != 'all':
        fs['service__cls'] = cls
        urlparams.append('class=' + cls)
        page['robots'] = 'noindex'
        if 'subtitle' in page:
            page['subtitle'] += ' <b>(%s)</b>' % escape(cls.capitalize())
        else:
            page['subtitle'] = _('You are currently browsing %s entries only.'
                                 ) % ('<b>' + escape(cls) + '</b>')

    # Filter by author name.
    author = request.GET.get('author', 'all')
    if author != 'all':
        fs['author_name'] = author
        urlparams.append('author=' + author)
        page['robots'] = 'noindex'

    # Filter by service type.
    srvapi = request.GET.get('service', 'all')
    if srvapi != 'all':
        fs['service__api'] = srvapi
        urlparams.append('service=' + srvapi)
        page['robots'] = 'noindex'
        srvapi_name = dict(API_LIST).get(srvapi, srvapi.capitalize())
        if 'subtitle' in page:
            page['subtitle'] += ' <b>(%s)</b>' % escape(srvapi_name)
        else:
            page['subtitle'] = _(
                'You are currently browsing entries from %s service only.') % (
                    '<b>' + escape(srvapi_name) + '</b>')

    # Filter entries after specified timestamp 'start'.
    after = False
    start = request.GET.get('start', False)
    if start:
        qs = fs.copy()
        try:
            dt = datetime.datetime.fromtimestamp(float(start))
        except ValueError:
            raise Http404

        if page['backtime']:
            fs[entries_orderby + '__lte'] = dt
            qs[entries_orderby + '__gt'] = fs[entries_orderby + '__lte']
            q = Entry.objects.order_by(entries_orderby)
        else:
            fs[entries_orderby + '__gte'] = dt
            qs[entries_orderby + '__lt'] = fs[entries_orderby + '__gte']
            q = Entry.objects.order_by('-' + entries_orderby)

        q = q.filter(**qs)[0:entries_on_page].values(entries_orderby)
        if len(q):
            after = q[len(q) - 1][entries_orderby]
            after = int(time.mktime(after.timetuple()))
        page['title'] = '%s' % str(dt)[0:-3]
        page['robots'] = 'noindex'

    # Search/Query entries.
    search_enable = getattr(settings, 'SEARCH_ENABLE', False)
    search_engine = getattr(settings, 'SEARCH_ENGINE', 'sphinx')
    search_query = request.GET.get('s', '')

    if search_query != '' and search_enable:
        page['search'] = search_query
        page['title'] = 'Search Results for %s' % escape(search_query)
        page['subtitle'] = _(
            'Your search for %s returned the following results.') % (
                '<b>' + escape(search_query) + '</b>')
        urlparams.append('s=' + search_query)
        sfs = {}
        if not authed and not friend:
            sfs['friends_only'] = False
        page_number = int(request.GET.get('page', 1))
        offset = (page_number - 1) * entries_on_page

        try:
            if search_engine == 'sphinx':
                select = "SELECT * FROM %s WHERE MATCH('%s')"
                if page['public']:
                    select += ' AND public=1'
                if 'friends_only' in sfs and sfs['friends_only'] == False:
                    select += ' AND friends_only=0'
                select += ' LIMIT 1000'

                cursor = connections['sphinx'].cursor()
                cursor.execute(select %
                               (settings.SPHINX_INDEX_NAME, search_query))
                res = __dictfetchall(cursor)
                uids = [ent['id'] for ent in res]
                entries = entries.filter(id__in=uids).select_related()
            else:  # db search
                if page['public']:
                    sfs['service__public'] = True
                sfs['content__icontains'] = search_query
                entries = entries.filter(**sfs).select_related()

            limit = offset + entries_on_page
            if offset >= entries_on_page:
                page['prevpage'] = page_number - 1
            if limit < entries.count():
                page['nextpage'] = page_number + 1

            entries = entries[offset:limit]
        except:
            entries = []

        start = False

    # If not search, then normal query.
    else:
        entries = entries.filter(**fs)[0:entries_on_page + 1].select_related()
        num = len(entries)

        if 'exactentry' in page and num:
            page['title'] = truncatewords(entries[0].title, 7)

        # Time-based pagination.
        if num > entries_on_page:
            start = entries[num - 1].__getattribute__(entries_orderby)
            start = int(time.mktime(start.timetuple()))
        else:
            start = False

        entries = entries[0:entries_on_page]

        if num:
            crymax = entries[0].date_published.year
            crymin = entries[len(entries) - 1].date_published.year
            if crymin != crymax:
                page['copyright_years'] = '%s-%s' % (crymin, crymax)
            else:
                page['copyright_years'] = crymin

    # Build URL params for links.
    if len(urlparams):
        urlparams = '?' + reduce(
            lambda x, y: six.text_type(x) + '&' + six.text_type(y), urlparams,
            '')[1:] + '&'
    else:
        urlparams = '?'

    if len(entries):
        page['updated'] = entries[0].date_published
    else:
        page['updated'] = datetime.datetime.utcnow()
    page['urlparams'] = urlparams
    page['start'] = start
    page['after'] = after

    if hasattr(settings, 'STREAM_TITLE'):
        page_title = settings.STREAM_TITLE
    else:
        page_title = None

    if hasattr(settings, 'STREAM_DESCRIPTION'):
        page['description'] = settings.STREAM_DESCRIPTION

    # Set page theme.
    page['themes'] = settings.THEMES
    page['themes_more'] = True if len(settings.THEMES) > 1 else False
    page['theme'] = common.get_theme(request)

    # Setup links.
    page['need_fbc'] = False
    for entry in entries:
        entry.only_for_friends = entry.friends_only

        if authed or friend:
            entry.friends_only = False
        elif entry.friends_only:
            page['need_fbc'] = True

        if not entry.friends_only:
            entry.gls_link = '%s/%s' % (urlresolvers.reverse(
                'entry', args=[entry.id
                               ]), gls_slugify(truncatewords(entry.title, 7)))
        else:
            entry.gls_link = '%s/' % (urlresolvers.reverse('entry',
                                                           args=[entry.id]))
            if 'title' in page:
                del page['title']

        entry.gls_absolute_link = '%s%s' % (page['site_url'], entry.gls_link)

    # Check single-entry URL
    if 'exactentry' in page:
        if len(entries):
            gls_link = entries[0].gls_link
            if gls_link != request.path:
                return HttpResponsePermanentRedirect(gls_link)
            page['canonical_link'] = urllib.parse.urljoin(
                settings.BASE_URL, gls_link)
        else:
            raise Http404

    if 'title' in page and page['title'] != '':
        if page_title:
            page['title'] += getattr(settings, 'STREAM_TITLE_SUFFIX',
                                     ' | ' + page_title)
    elif page_title:
        page['title'] = page_title

    # Pickup right output format and finish.
    format = request.GET.get('format', 'html')
    if format == 'atom':
        return render_to_response('stream.atom', {
            'entries': entries,
            'page': page
        },
                                  content_type='application/atom+xml')
    elif format == 'json':
        cb = request.GET.get('callback', False)
        return render_to_response('stream.json', {
            'entries': entries,
            'page': page,
            'callback': cb
        },
                                  content_type='application/json')
    elif format == 'html-pure' and request.is_ajax():
        # Check which entry is already favorite.
        if authed and page['ctx'] != 'favorites':
            ents = [entry.id for entry in entries]
            favs = Favorite.objects.filter(user__id=request.user.id,
                                           entry__id__in=ents)
            favs = [f.entry_id for f in favs]
            for entry in entries:
                if entry.id in favs:
                    entry.fav = True
                if entry.service.api in ('twitter', 'identica'):
                    entry.sms = True
        d = {
            'next':
            page['start'],
            'stream':
            strip_spaces_between_tags(
                render_to_string(
                    'stream-pure.html', {
                        'entries': entries,
                        'page': page,
                        'authed': authed,
                        'friend': friend
                    })),
        }
        if 'nextpage' in page:
            d['next'] = page['nextpage']
        return HttpResponse(json.dumps(d), content_type='application/json')
    elif format != 'html':
        raise Http404
    else:
        # Check which entry is already favorite.
        if authed and page['ctx'] != 'favorites':
            ents = [entry.id for entry in entries]
            favs = Favorite.objects.filter(user__id=request.user.id,
                                           entry__id__in=ents)
            favs = [f.entry_id for f in favs]
            for entry in entries:
                if entry.id in favs:
                    entry.fav = True
                if entry.service.api in ('twitter', 'identica'):
                    entry.sms = True

        # Get lists.
        lists = List.objects.filter(user__id=request.user.id).order_by('name')

        # Get archives.
        if 'entry' in args:
            qs = {}
        else:
            qs = fs.copy()
            if year:
                del qs[entries_orderby + '__year']
            if month:
                del qs[entries_orderby + '__month']
            if day:
                del qs[entries_orderby + '__day']
        archs = Entry.objects.filter(**qs).dates('date_published',
                                                 'month',
                                                 order='DESC')
        page['months12'] = [datetime.date(2010, x, 1) for x in range(1, 13)]

        # List available classes.
        fs = {}
        if not authed or page['ctx'] == 'public':
            fs['public'] = True
        _classes = Service.objects.filter(**fs).order_by('id') \
            .values('api', 'cls')
        classes = {}
        for item in _classes:
            if item['cls'] not in classes:
                classes[item['cls']] = item
        classes = list(classes.values())

        accept_lang = request.META.get('HTTP_ACCEPT_LANGUAGE', '').split(',')
        for i, lang in enumerate(accept_lang):
            accept_lang[i] = lang.split(';')[0]
        page['lang'] = accept_lang[0]

        request.user.fb_username = request.session.get('fb_username', '')
        request.user.fb_profile_url = request.session.get('fb_profile_url', '')

        res = render_to_response(
            'stream.html', {
                'classes': classes,
                'entries': entries,
                'lists': lists,
                'archives': archs,
                'page': page,
                'authed': authed,
                'friend': friend,
                'has_search': search_enable,
                'is_secure': request.is_secure(),
                'user': request.user
            })
        res['X-XRDS-Location'] = request.build_absolute_uri(
            urlresolvers.reverse('glifestream.gauth.views.xrds'))
        return res
Example 31
 def alter_field(self, model, old_field, new_field, strict=False):
     """
     Allows a field's type, uniqueness, nullability, default, column,
     constraints etc. to be modified.
     Requires a copy of the old field as well so we can only perform
     changes that are required.
     If strict is true, raises errors if the old column does not match old_field precisely.
     """
     # Ensure this field is even column-based
     old_db_params = old_field.db_parameters(connection=self.connection)
     old_type = old_db_params['type']
     new_db_params = new_field.db_parameters(connection=self.connection)
     new_type = new_db_params['type']
     if old_type is None and new_type is None and (
             old_field.rel.through and new_field.rel.through
             and old_field.rel.through._meta.auto_created
             and new_field.rel.through._meta.auto_created):
         return self._alter_many_to_many(model, old_field, new_field,
                                         strict)
     elif old_type is None or new_type is None:
         raise ValueError(
             "Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)"
             % (
                 old_field,
                 new_field,
             ))
     # Has unique been removed?
     if old_field.unique and (not new_field.unique or
                              (not old_field.primary_key
                               and new_field.primary_key)):
         # Find the unique constraint for this field
         constraint_names = self._constraint_names(model,
                                                   [old_field.column],
                                                   unique=True)
         if strict and len(constraint_names) != 1:
             raise ValueError(
                 "Found wrong number (%s) of unique constraints for %s.%s" %
                 (
                     len(constraint_names),
                     model._meta.db_table,
                     old_field.column,
                 ))
         for constraint_name in constraint_names:
             self.execute(
                 self.sql_delete_unique % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": constraint_name,
                 }, )
     # Removed an index?
     if old_field.db_index and not new_field.db_index and not old_field.unique and not (
             not new_field.unique and old_field.unique):
         # Find the index for this field
         index_names = self._constraint_names(model, [old_field.column],
                                              index=True)
         if strict and len(index_names) != 1:
             raise ValueError(
                 "Found wrong number (%s) of indexes for %s.%s" % (
                     len(index_names),
                     model._meta.db_table,
                     old_field.column,
                 ))
         for index_name in index_names:
             self.execute(
                 self.sql_delete_index % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": index_name,
                 })
     # Drop any FK constraints, we'll remake them later
     if old_field.rel:
         fk_names = self._constraint_names(model, [old_field.column],
                                           foreign_key=True)
         if strict and len(fk_names) != 1:
             raise ValueError(
                 "Found wrong number (%s) of foreign key constraints for %s.%s"
                 % (
                     len(fk_names),
                     model._meta.db_table,
                     old_field.column,
                 ))
         for fk_name in fk_names:
             self.execute(
                 self.sql_delete_fk % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": fk_name,
                 })
     # Change check constraints?
     if old_db_params['check'] != new_db_params['check'] and old_db_params[
             'check']:
         constraint_names = self._constraint_names(model,
                                                   [old_field.column],
                                                   check=True)
         if strict and len(constraint_names) != 1:
             raise ValueError(
                 "Found wrong number (%s) of check constraints for %s.%s" %
                 (
                     len(constraint_names),
                     model._meta.db_table,
                     old_field.column,
                 ))
         for constraint_name in constraint_names:
             self.execute(
                 self.sql_delete_check % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": constraint_name,
                 })
     # Have they renamed the column?
     if old_field.column != new_field.column:
         self.execute(
             self.sql_rename_column % {
                 "table": self.quote_name(model._meta.db_table),
                 "old_column": self.quote_name(old_field.column),
                 "new_column": self.quote_name(new_field.column),
                 "type": new_type,
             })
     # Next, start accumulating actions to do
     actions = []
     # Type change?
     if old_type != new_type:
         actions.append((
             self.sql_alter_column_type % {
                 "column": self.quote_name(new_field.column),
                 "type": new_type,
             },
             [],
         ))
     # Default change?
     old_default = self.effective_default(old_field)
     new_default = self.effective_default(new_field)
     if old_default != new_default:
         if new_default is None:
             actions.append((
                 self.sql_alter_column_no_default % {
                     "column": self.quote_name(new_field.column),
                 },
                 [],
             ))
         else:
             if self.connection.features.requires_literal_defaults:
                 # Some databases can't take defaults as a parameter (oracle)
                 # If this is the case, the individual schema backend should
                 # implement prepare_default
                 actions.append((
                     self.sql_alter_column_default % {
                         "column": self.quote_name(new_field.column),
                         "default": self.prepare_default(new_default),
                     },
                     [],
                 ))
             else:
                 actions.append((
                     self.sql_alter_column_default % {
                         "column": self.quote_name(new_field.column),
                         "default": "%s",
                     },
                     [new_default],
                 ))
     # Nullability change?
     if old_field.null != new_field.null:
         if new_field.null:
             actions.append((
                 self.sql_alter_column_null % {
                     "column": self.quote_name(new_field.column),
                     "type": new_type,
                 },
                 [],
             ))
         else:
             actions.append((
                 self.sql_alter_column_not_null % {
                     "column": self.quote_name(new_field.column),
                     "type": new_type,
                 },
                 [],
             ))
     if actions:
         # Combine actions together if we can (e.g. postgres)
         if self.connection.features.supports_combined_alters:
             sql, params = tuple(zip(*actions))
             actions = [(", ".join(sql), reduce(operator.add, params))]
         # Apply those actions
         for sql, params in actions:
             self.execute(
                 self.sql_alter_column % {
                     "table": self.quote_name(model._meta.db_table),
                     "changes": sql,
                 },
                 params,
             )
     # Added a unique?
     if not old_field.unique and new_field.unique:
         self.execute(
             self.sql_create_unique % {
                 "table":
                 self.quote_name(model._meta.db_table),
                 "name":
                 self._create_index_name(model, [new_field.column],
                                         suffix="_uniq"),
                 "columns":
                 self.quote_name(new_field.column),
             })
     # Added an index?
     if not old_field.db_index and new_field.db_index and not new_field.unique and not (
             not old_field.unique and new_field.unique):
         self.execute(
             self.sql_create_index % {
                 "table":
                 self.quote_name(model._meta.db_table),
                 "name":
                 self._create_index_name(model, [new_field.column],
                                         suffix="_uniq"),
                 "columns":
                 self.quote_name(new_field.column),
                 "extra":
                 "",
             })
     # Changed to become primary key?
     # Note that we don't detect unsetting of a PK, as we assume another field
     # will always come along and replace it.
     if not old_field.primary_key and new_field.primary_key:
         # First, drop the old PK
         constraint_names = self._constraint_names(model, primary_key=True)
         if strict and len(constraint_names) != 1:
             raise ValueError(
                 "Found wrong number (%s) of PK constraints for %s" % (
                     len(constraint_names),
                     model._meta.db_table,
                 ))
         for constraint_name in constraint_names:
             self.execute(
                 self.sql_delete_pk % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": constraint_name,
                 }, )
         # Make the new one
         self.execute(
             self.sql_create_pk % {
                 "table":
                 self.quote_name(model._meta.db_table),
                 "name":
                 self._create_index_name(model, [new_field.column],
                                         suffix="_pk"),
                 "columns":
                 self.quote_name(new_field.column),
             })
     # Does it have a foreign key?
     if new_field.rel:
         self.execute(
             self.sql_create_fk % {
                 "table":
                 self.quote_name(model._meta.db_table),
                 "name":
                 self._create_index_name(model, [new_field.column],
                                         suffix="_fk"),
                 "column":
                 self.quote_name(new_field.column),
                 "to_table":
                 self.quote_name(new_field.rel.to._meta.db_table),
                 "to_column":
                 self.quote_name(new_field.rel.get_related_field().column),
             })
     # Does it have check constraints we need to add?
     if old_db_params['check'] != new_db_params['check'] and new_db_params[
             'check']:
         self.execute(
             self.sql_create_check % {
                 "table":
                 self.quote_name(model._meta.db_table),
                 "name":
                 self._create_index_name(model, [new_field.column],
                                         suffix="_check"),
                 "column":
                 self.quote_name(new_field.column),
                 "check":
                 new_db_params['check'],
             })
     # Reset connection if required
     if self.connection.features.connection_persists_old_columns:
         self.connection.close()
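
The combined-alters branch above zips the accumulated (sql, params) pairs apart and folds the parameter lists into one, so several column changes ride in a single ALTER TABLE. A minimal standalone sketch of that fold (the SQL strings are invented for illustration):

import operator
from functools import reduce

actions = [
    ('ALTER COLUMN "age" TYPE integer', []),
    ('ALTER COLUMN "age" SET DEFAULT %s', [0]),
]
sql, params = tuple(zip(*actions))
combined = [(", ".join(sql), reduce(operator.add, params))]
# combined == [('ALTER COLUMN "age" TYPE integer, ALTER COLUMN "age" SET DEFAULT %s', [0])]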
Example n. 32
def url_join(*args):
    return reduce(_reduce_url_parts, args)
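
_reduce_url_parts is not shown in this excerpt. A plausible stand-in (an assumption, not the project's actual helper) joins two parts with exactly one slash between them:

from functools import reduce

def _reduce_url_parts(left, right):
    # hypothetical helper: normalise the slash between two URL parts
    return left.rstrip('/') + '/' + right.lstrip('/') if left else right

def url_join(*args):
    return reduce(_reduce_url_parts, args)

# url_join('http://example.com/', '/api', 'v1') -> 'http://example.com/api/v1'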
Example n. 33
 def render_data(self, state, review_request):
     """Return the rendered contents of the column."""
     groups = review_request.target_groups.all()
     return reduce(lambda a, d: a + d.name + ' ', groups, '')
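
The fold above concatenates group names with trailing spaces. Assuming groups is any iterable of objects with a .name attribute, a join expresses the same result in linear time:

from functools import reduce

class Group:
    def __init__(self, name):
        self.name = name

groups = [Group('devs'), Group('qa')]
folded = reduce(lambda a, d: a + d.name + ' ', groups, '')
assert folded == ''.join(g.name + ' ' for g in groups)  # 'devs qa '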
Example n. 34
 def render_data(self, state, review_request):
     groups = review_request.target_groups.all()
     return reduce(lambda a, d: a + d.name + ' ', groups, '')
Example n. 35
    def autocomplete_view(self, request):
        """
        Searches in the fields of the given related model and returns the
        result as a simple string to be used by the jQuery Autocomplete plugin
        """
        query = request.GET.get('q', None)
        app_label = request.GET.get('app_label', None)
        model_name = request.GET.get('model_name', None)
        search_fields = request.GET.get('search_fields', None)
        object_pk = request.GET.get('object_pk', None)

        try:
            to_string_function = self.related_string_functions[model_name]
        except KeyError:
            to_string_function = lambda x: six.text_type(x)

        if search_fields and app_label and model_name and (query or object_pk):

            def construct_search(field_name):
                # use different lookup methods depending on the notation
                if field_name.startswith('^'):
                    fmt, name = "{}__istartswith", field_name[1:]
                elif field_name.startswith('='):
                    fmt, name = "{}__iexact", field_name[1:]
                elif field_name.startswith('@'):
                    fmt, name = "{}__search", field_name[1:]
                else:
                    fmt, name = "{}__icontains", field_name
                return fmt.format(name)

            model = apps.get_model(app_label, model_name)
            queryset = model._default_manager.all()
            data = ''
            if query:
                for bit in query.split():
                    or_queries = [
                        models.Q(**{construct_search(smart_str(field_name)): smart_str(bit)})
                        for field_name
                        in search_fields.split(',')
                    ]
                    other_qs = QuerySet(model)
                    other_qs.query.select_related = queryset.query.select_related
                    other_qs = other_qs.filter(reduce(operator.or_, or_queries))
                    queryset = queryset & other_qs

                if self.autocomplete_limit:
                    queryset = queryset[:self.autocomplete_limit]

                data = ''.join([
                    '{}|{}\n'.format(to_string_function(f), f.pk)
                    for f
                    in queryset
                ])
            elif object_pk:
                try:
                    obj = queryset.get(pk=object_pk)
                except (model.DoesNotExist, ValueError):
                    pass
                else:
                    data = to_string_function(obj)
            return HttpResponse(data)
        return HttpResponseNotFound()
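
construct_search maps a small prefix notation onto ORM lookups. A plain-Python restatement of that mapping, table-driven rather than if/elif:

LOOKUP_PREFIXES = {
    '^': '{}__istartswith',  # prefix match, case-insensitive
    '=': '{}__iexact',       # exact match, case-insensitive
    '@': '{}__search',       # full-text search
}

def construct_search(field_name):
    fmt = LOOKUP_PREFIXES.get(field_name[0], '{}__icontains')
    name = field_name[1:] if field_name[0] in LOOKUP_PREFIXES else field_name
    return fmt.format(name)

assert construct_search('^name') == 'name__istartswith'
assert construct_search('title') == 'title__icontains'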
Example n. 36
 def compress(self, value):
     from django.utils.six.moves import reduce
     if value:
         return "{}".format(reduce(add, map(lambda x: mul(*x), zip(map(float, value), self.SECONDS))))
     return None
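
compress is a dot product: each sub-widget value times its weight in seconds, summed by reduce. A standalone check, assuming SECONDS weights days, hours, minutes and seconds (the excerpt does not show that attribute):

from functools import reduce
from operator import add, mul

SECONDS = (86400, 3600, 60, 1)   # assumed weights
value = ['1', '2', '30', '0']    # 1 day, 2 hours, 30 minutes

total = reduce(add, map(lambda x: mul(*x), zip(map(float, value), SECONDS)))
assert total == 86400 + 2 * 3600 + 30 * 60  # 95400.0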
Example n. 37
    def get_records(self, data):
        # TODO: validate `data`
        qs = self.get_queryset()

        # search
        search  = data.get('search',[])
        filters = []
        for param in search:
            term     = param['value']
            field    = param['field']
            typ      = param['type']
            operator = param['operator']
            if field == 'recid':
                field = 'pk'
            type_search = ""
            if operator == "contains":
                type_search = '__i'+operator
            elif operator == "in":
                type_search = '__'+operator
            elif operator == "between":
                type_search = '__range'
            elif operator == "begins":
                type_search = '__istartswith'
            elif operator == "ends":
                type_search = '__iendswith'
            elif operator == "is":
                type_search = "__exact"
            filters.append((Q(**{field+type_search: term})))
        if filters:
            searchLogic = data.get('searchLogic','AND')
            if searchLogic == "AND":
                searchLogic = and_
            else:
                searchLogic = or_
            qs = qs.filter(reduce(searchLogic, filters))
        
        # sort
        sort = data.get('sort',[])
        order = []
        for param in sort:
            field     = param['field']
            if field == "recid":
                field = self.model._meta.pk.get_attname()
            direction = param['direction']
            if direction == 'desc':
                field = '-' + field
            order.append(field)
        if order:
            qs = qs.order_by(*order)

        # fields
        qs = qs.values('pk',*self.fields)

        # pagination
        page_size = data.get('limit',1)
        start_index = data.get('offset',0)
        paginator = Paginator(qs, page_size)
        num_page = (start_index // page_size) + 1  # floor division: Paginator.page() expects an int
        page = paginator.page(num_page)

        return self.success(data={
                "total"   : page.paginator.count,
                "records" : list(page.object_list),
        })
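
The search section builds one Q object per criterion and folds the list with and_ or or_ depending on searchLogic. A minimal sketch of that fold (field names invented; requires Django on the path, but no database):

from functools import reduce
from operator import and_, or_
from django.db.models import Q

filters = [Q(name__icontains='foo'), Q(pk__in=[1, 2, 3])]
all_match = reduce(and_, filters)   # name matches AND pk in list
any_match = reduce(or_, filters)    # name matches OR pk in list
# qs.filter(all_match) / qs.filter(any_match)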
Example n. 38
 def _has_changed(self, initial, data):
     data = [int(d) for d in data]
     if not data:
         return (not initial)
     else:
         return (initial != reduce(operator.or_, data))
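
Each submitted value is an integer flag, so the widget's combined state is the bitwise OR of the list, and the comparison runs against that folded bitmask:

import operator
from functools import reduce

data = [1, 4, 8]                         # individual flag values
assert reduce(operator.or_, data) == 13  # 0b1101, the combined bitmask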
Example n. 39
    def contribute_to_class(self, cls, name):
        self.generic_rel_name = '%s_generic_rel' % name
        self.raw_file_field_name = '%s_raw' % name
        self.file_field_name = name

        self.set_attributes_from_name(name)
        self.file_kwargs['db_column'] = self.db_column or self.attname

        # Save a reference to which model this class is on for future use
        self.model = cls

        super(GenericRelation, self).contribute_to_class(cls, name, **{
            ('private_only' if django.VERSION > (1, 10) else 'virtual_only'): True,
        })

        self.column = self.file_kwargs['db_column']

        # file_field_cls is a class, so the check needs issubclass, not isinstance
        if not issubclass(self.file_field_cls, models.ImageField):
            self.file_kwargs.pop('width_field', None)
            self.file_kwargs.pop('height_field', None)
        else:
            if not self.file_kwargs['width_field']:
                del self.file_kwargs['width_field']
            if not self.file_kwargs['height_field']:
                del self.file_kwargs['height_field']

        self.__dict__['file_field'] = self.file_field_cls(name=name, **self.file_kwargs)
        ### HACK: manually fix creation counter
        self.file_field.creation_counter = self.creation_counter

        # This calls contribute_to_class() for the FileField
        parents = cls._meta.parents.keys()
        parent_field_names = []
        if parents:
            parent_fields = reduce(operator.add, [p._meta.local_fields for p in parents], [])
            parent_field_names = [f.name for f in parent_fields]
        # Don't duplicate the field when inherited from a parent model
        if self.file_field_name not in parent_field_names:
            # Don't add field to proxy models
            if not cls._meta.proxy:
                cls.add_to_class(self.file_field_name, self.file_field)

        # Add the descriptor for the generic relation
        generic_descriptor = GenericForeignFileDescriptor(self, self.file_field,
            for_concrete_model=self.for_concrete_model)
        # We use self.__dict__ to avoid triggering __get__()
        self.__dict__['generic_descriptor'] = generic_descriptor
        setattr(cls, self.generic_rel_name, generic_descriptor)

        # Add the descriptor for the FileField
        file_descriptor = GenericForeignFileDescriptor(self, self.file_field,
            is_file_field=True, for_concrete_model=self.for_concrete_model)
        self.__dict__['file_descriptor'] = file_descriptor
        setattr(cls, self.file_field_name, file_descriptor)

        self.file_field.__dict__.update({
            'generic_descriptor': generic_descriptor,
            'file_descriptor': file_descriptor,
            'db_field': self,
            'generic_field': getattr(cls, self.generic_rel_name),
        })
        setattr(cls, self.raw_file_field_name, self.file_descriptor_cls(self.file_field))
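
reduce(operator.add, ..., []) above flattens the per-parent field lists into a single list; itertools.chain gives the same result without repeated list copying:

import operator
from functools import reduce
from itertools import chain

per_parent = [['id', 'name'], ['created'], []]
flat = reduce(operator.add, per_parent, [])
assert flat == ['id', 'name', 'created']
assert list(chain.from_iterable(per_parent)) == flat  # equivalent, linear time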
Example n. 40
 def render_data(self, review_request):
     people = review_request.target_people.all()
     return reduce(lambda a, d: a + d.username + ' ', people, '')
Example n. 41
        def alter_field(self, model, old_field, new_field, strict=False):
            """
            Allows a field's type, uniqueness, nullability, default, column,
            constraints etc. to be modified.
            Requires a copy of the old field as well so we can only perform
            changes that are required.
            If strict is true, raises errors if the old column does not match old_field precisely.
            """
            # Ensure this field is even column-based
            old_db_params = old_field.db_parameters(connection=self.connection)
            old_type = old_db_params['type']
            new_db_params = new_field.db_parameters(connection=self.connection)
            new_type = new_db_params['type']
            if old_type is None and new_type is None \
                and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created
                    and new_field.rel.through._meta.auto_created):
                return self._alter_many_to_many(model, old_field, new_field,
                                                strict)
            elif old_type is None or new_type is None:
                raise ValueError(
                    "Cannot alter field %s into %s - they are not compatible types "
                    "(probably means only one is an M2M with implicit through model)"
                    % (
                        old_field,
                        new_field,
                    ))
            # Has unique been removed?
            if old_field.unique and (not new_field.unique or
                                     (not old_field.primary_key
                                      and new_field.primary_key)):
                # Find the unique constraint for this field
                constraint_names = self._constraint_names(model,
                                                          [old_field.column],
                                                          unique=True)
                if strict and len(constraint_names) != 1:
                    raise ValueError(
                        "Found wrong number (%s) of unique constraints for %s.%s"
                        % (
                            len(constraint_names),
                            model._meta.db_table,
                            old_field.column,
                        ))
                for constraint_name in constraint_names:
                    self.execute(*self._delete_db_constraint_sql(
                        model, constraint_name, constraint_type='unique'))
            # Removed an index?
            if old_field.db_index and not new_field.db_index and not old_field.unique \
                    and not (not new_field.unique and old_field.unique):
                # Find the index for this field
                index_names = self._constraint_names(model, [old_field.column],
                                                     index=True)
                if strict and len(index_names) != 1:
                    raise ValueError(
                        "Found wrong number (%s) of indexes for %s.%s" % (
                            len(index_names),
                            model._meta.db_table,
                            old_field.column,
                        ))
                for index_name in index_names:
                    self.execute(*self._delete_db_constraint_sql(
                        model, index_name, constraint_type='index'))
            # Drop any FK constraints, we'll remake them later
            if old_field.rel:
                fk_names = self._constraint_names(model, [old_field.column],
                                                  foreign_key=True)
                if strict and len(fk_names) != 1:
                    raise ValueError(
                        "Found wrong number (%s) of foreign key constraints for %s.%s"
                        % (
                            len(fk_names),
                            model._meta.db_table,
                            old_field.column,
                        ))
                for fk_name in fk_names:
                    self.execute(*self._delete_db_constraint_sql(
                        model, fk_name, constraint_type='fk'))
            # Drop incoming FK constraints if we're a primary key and things are going
            # to change.
            if old_field.primary_key and new_field.primary_key and old_type != new_type:
                for rel in new_field.model._meta.get_all_related_objects():
                    rel_fk_names = self._constraint_names(rel.model,
                                                          [rel.field.column],
                                                          foreign_key=True)
                    for fk_name in rel_fk_names:
                        self.execute(*self._delete_db_constraint_sql(
                            model, fk_name, constraint_type='fk'))
            # Change check constraints?
            if old_db_params['check'] != new_db_params[
                    'check'] and old_db_params['check']:
                constraint_names = self._constraint_names(model,
                                                          [old_field.column],
                                                          check=True)
                if strict and len(constraint_names) != 1:
                    raise ValueError(
                        "Found wrong number (%s) of check constraints for %s.%s"
                        % (
                            len(constraint_names),
                            model._meta.db_table,
                            old_field.column,
                        ))
                for constraint_name in constraint_names:
                    self.execute(*self._delete_db_constraint_sql(
                        model, constraint_name, constraint_type='check'))
            # Have they renamed the column?
            if old_field.column != new_field.column:
                self.rename_db_column(model, old_field.column,
                                      new_field.column, new_type)
            # Next, start accumulating actions to do
            actions = []
            post_actions = []
            # Type change?
            if old_type != new_type:
                type_actions = self._alter_db_column_sql(
                    model,
                    new_field.column,
                    'type',
                    values={
                        'type': new_type,
                        'old_type': old_type,
                    },
                    fragment=True,
                )
                actions.extend(type_actions[0])
                post_actions.extend(type_actions[1])
            # Default change?
            old_default = self.effective_default(old_field)
            new_default = self.effective_default(new_field)
            if old_default != new_default:
                if new_default is None:
                    default_actions = self._alter_db_column_sql(
                        model, new_field.column, 'no_default', fragment=True)
                else:
                    default_sql, default_params = self.prepare_default(
                        new_default)
                    default_actions = self._alter_db_column_sql(
                        model,
                        new_field.column,
                        'default',
                        values={'default': default_sql},
                        fragment=True,
                        params=default_params)
                actions.extend(default_actions[0])
                post_actions.extend(default_actions[1])
            # Nullability change?
            if old_field.null != new_field.null:
                alteration = 'null' if new_field.null else 'not_null'
                null_actions = self._alter_db_column_sql(
                    model,
                    new_field.column,
                    alteration,
                    values={'type': new_type},
                    fragment=True)
                actions.extend(null_actions[0])
                post_actions.extend(null_actions[1])
            if actions:
                # Combine actions together if we can (e.g. postgres)
                if self.connection.features.supports_combined_alters:
                    sql, params = tuple(zip(*actions))
                    actions = [(", ".join(sql), reduce(operator.add, params))]
                # Apply those actions
                for sql, params in actions:
                    if sql:
                        self.execute(
                            self.sql_alter_column % {
                                "table": self.quote_name(model._meta.db_table),
                                "changes": sql,
                            },
                            params,
                        )
            if post_actions:
                for sql, params in post_actions:
                    if sql:
                        self.execute(sql, params)
            # Added a unique?
            if not old_field.unique and new_field.unique:
                self.execute(*self._create_db_constraint_sql(
                    model, new_field.column, 'unique'))
            # Added an index?
            if not old_field.db_index and new_field.db_index and not new_field.unique \
                    and not (not old_field.unique and new_field.unique):
                self.execute(*self._create_db_constraint_sql(
                    model, new_field.column, 'index'))
            # Type alteration on primary key? Then we need to alter the column
            # referring to us.
            rels_to_update = []
            if old_field.primary_key and new_field.primary_key and old_type != new_type:
                rels_to_update.extend(
                    new_field.model._meta.get_all_related_objects())
            # Changed to become primary key?
            # Note that we don't detect unsetting of a PK, as we assume another field
            # will always come along and replace it.
            if not old_field.primary_key and new_field.primary_key:
                # First, drop the old PK
                constraint_names = self._constraint_names(model,
                                                          primary_key=True)
                if strict and len(constraint_names) != 1:
                    raise ValueError(
                        "Found wrong number (%s) of PK constraints for %s" % (
                            len(constraint_names),
                            model._meta.db_table,
                        ))
                for constraint_name in constraint_names:
                    self.execute(*self._delete_db_constraint_sql(
                        model, constraint_name, constraint_type='pk'))
                # Make the new one
                self.execute(*self._create_db_constraint_sql(
                    model, new_field.column, 'pk'))
                # Update all referencing columns
                rels_to_update.extend(
                    new_field.model._meta.get_all_related_objects())
            # Handle our type alters on the other end of rels from the PK stuff above
            for rel in rels_to_update:
                type_actions = self._alter_db_column_sql(
                    rel.model, rel.field.column, 'type',
                    values={'type': new_type, 'old_type': old_type},
                    fragment=True)
                # Apply the returned (actions, post_actions) fragments; with
                # fragment=True the helper does not execute anything itself.
                for sql, params in type_actions[0]:
                    if sql:
                        self.execute(
                            self.sql_alter_column % {
                                "table": self.quote_name(rel.model._meta.db_table),
                                "changes": sql,
                            }, params)
                for sql, params in type_actions[1]:
                    if sql:
                        self.execute(sql, params)

            # Does it have a foreign key?
            if new_field.rel:
                self.execute(*self._create_db_constraint_sql(
                    model,
                    new_field.column,
                    'fk',
                    values={
                        "to_table":
                        self.quote_name(new_field.rel.to._meta.db_table),
                        "to_column":
                        self.quote_name(
                            new_field.rel.get_related_field().column),
                    }))
            # Rebuild FKs that pointed to us if we previously had to drop them
            if old_field.primary_key and new_field.primary_key and old_type != new_type:
                for rel in new_field.model._meta.get_all_related_objects():
                    self.execute(*self._create_db_constraint_sql(
                        model,
                        new_field.column,
                        'fk',
                        values={
                            "to_table": self.quote_name(model._meta.db_table),
                            "to_column": self.quote_name(new_field.column),
                        }))
            # Does it have check constraints we need to add?
            if old_db_params['check'] != new_db_params[
                    'check'] and new_db_params['check']:
                self.execute(*self._create_db_constraint_sql(
                    model,
                    new_field.column,
                    'check',
                    values={
                        'check': new_db_params['check'],
                    }))
            # Reset connection if required
            if self.connection.features.connection_persists_old_columns:
                self.connection.close()
Example n. 42
def index(request, **args):
    site_url = '%s://%s' % ('https' if request.is_secure() else 'http',
                            request.get_host())
    page = {
        'ctx': args.get('ctx', ''),
        'backtime': True,
        'robots': 'index',
        'public': False,
        'site_url': site_url,
        'base_url': settings.BASE_URL,
        'login_url': settings.LOGIN_URL,
        'favicon': settings.FAVICON,
        'author_name': settings.FEED_AUTHOR_NAME,
        'author_uri': getattr(settings, 'FEED_AUTHOR_URI', False),
        'taguri': settings.FEED_TAGURI,
        'icon': settings.FEED_ICON,
        'maps_engine': settings.MAPS_ENGINE,
        'fb_app_id': settings.FACEBOOK_APP_ID,
        'pshb_hubs': settings.PSHB_HUBS,
    }
    authed = request.user.is_authenticated() and request.user.is_staff
    friend = request.user.is_authenticated() and not request.user.is_staff
    urlparams = []
    entries_on_page = settings.ENTRIES_ON_PAGE
    entries_orderby = 'date_published'

    # Entries filter.
    fs = {'active': True, 'service__home': True}

    # Filter by dates.
    year = int(args.get('year', 0))
    month = int(args.get('month', 0))
    day = int(args.get('day', 0))
    if year:
        fs[entries_orderby + '__year'] = year
    if month:
        fs[entries_orderby + '__month'] = month
    if day:
        fs[entries_orderby + '__day'] = day
    if year and month and day:
        dt = datetime.date(year, month, day).strftime('%Y/%m/%d')
    elif year and month:
        dt = datetime.date(year, month, 1)
        prev, next = pn_month_start(dt)
        page['month_nav'] = True
        page['month_prev'] = prev.strftime('%Y/%m')
        page['month_next'] = next.strftime('%Y/%m')
        dt = dt.strftime('%Y/%m')
    elif year:
        dt = datetime.date(year, 1, 1).strftime('%Y')

    if year:
        page['backtime'] = False
        page['title'] = dt
        page['subtitle'] = _(
            'You are currently browsing the archive for %s') % ('<b>' + dt + '</b>')
        page['robots'] = 'noindex'

    if page['backtime']:
        entries = Entry.objects.order_by('-' + entries_orderby)
    else:
        entries = Entry.objects.order_by(entries_orderby)

    if not authed:
        fs['draft'] = False
    if not authed or page['ctx'] == 'public':
        fs['service__public'] = True
        page['public'] = True

    # Filter for favorites.
    if page['ctx'] == 'favorites':
        if not authed:
            return HttpResponseRedirect(settings.BASE_URL + '/')
        favs = Favorite.objects.filter(user__id=request.user.id)
        page['favorites'] = True
        page['title'] = _('Favorites')
        page['subtitle'] = _(
            'You are currently browsing your favorite entries')
        fs['id__in'] = favs.values('entry')

    # Filter lists.
    elif 'list' in args:
        try:
            services = List.objects.get(user__id=request.user.id,
                                        slug=args['list']).services
            del fs['service__home']
            fs['service__id__in'] = services.values('id')
            page['ctx'] = 'list/' + args['list']
            page['title'] = args['list']
            page['subtitle'] = _('You are currently browsing entries from %s list only.') % (
                '<b>' + args['list'] + '</b>')
        except List.DoesNotExist:
            if authed:
                raise Http404

    # Filter for exactly one given entry.
    elif 'entry' in args:
        fs['id__exact'] = int(args['entry'])
        page['exactentry'] = True
        if authed and 'service__public' in fs:
            del fs['service__public']

    if not authed:
        page['ctx'] = ''

    # Filter by class type.
    cls = request.GET.get('class', 'all')
    if cls != 'all':
        fs['service__cls'] = cls
        urlparams.append('class=' + cls)
        page['robots'] = 'noindex'
        if 'subtitle' in page:
            page['subtitle'] += ' <b>(%s)</b>' % escape(cls.capitalize())
        else:
            page['subtitle'] = _('You are currently browsing %s entries only.') % (
                '<b>' + escape(cls) + '</b>')

    # Filter by author name.
    author = request.GET.get('author', 'all')
    if author != 'all':
        fs['author_name'] = author
        urlparams.append('author=' + author)
        page['robots'] = 'noindex'

    # Filter by service type.
    srvapi = request.GET.get('service', 'all')
    if srvapi != 'all':
        fs['service__api'] = srvapi
        urlparams.append('service=' + srvapi)
        page['robots'] = 'noindex'
        srvapi_name = dict(API_LIST).get(srvapi, srvapi.capitalize())
        if 'subtitle' in page:
            page['subtitle'] += ' <b>(%s)</b>' % escape(srvapi_name)
        else:
            page['subtitle'] = _('You are currently browsing entries from %s service only.') % (
                '<b>' + escape(srvapi_name) + '</b>')

    # Filter entries after specified timestamp 'start'.
    after = False
    start = request.GET.get('start', False)
    if start:
        qs = fs.copy()
        try:
            dt = datetime.datetime.fromtimestamp(float(start))
        except ValueError:
            raise Http404

        if page['backtime']:
            fs[entries_orderby + '__lte'] = dt
            qs[entries_orderby + '__gt'] = fs[entries_orderby + '__lte']
            q = Entry.objects.order_by(entries_orderby)
        else:
            fs[entries_orderby + '__gte'] = dt
            qs[entries_orderby + '__lt'] = fs[entries_orderby + '__gte']
            q = Entry.objects.order_by('-' + entries_orderby)

        q = q.filter(**qs)[0:entries_on_page].values(entries_orderby)
        if len(q):
            after = q[len(q) - 1][entries_orderby]
            after = int(time.mktime(after.timetuple()))
        page['title'] = '%s' % str(dt)[0:-3]
        page['robots'] = 'noindex'

    # Search/Query entries.
    search_enable = getattr(settings, 'SEARCH_ENABLE', False)
    search_engine = getattr(settings, 'SEARCH_ENGINE', 'sphinx')
    search_query = request.GET.get('s', '')

    if search_query != '' and search_enable:
        page['search'] = search_query
        page['title'] = 'Search Results for %s' % escape(search_query)
        page['subtitle'] = _('Your search for %s returned the following results.') % (
            '<b>' + escape(search_query) + '</b>')
        urlparams.append('s=' + search_query)
        sfs = {}
        if not authed and not friend:
            sfs['friends_only'] = False
        page_number = int(request.GET.get('page', 1))
        offset = (page_number - 1) * entries_on_page

        try:
            if search_engine == 'sphinx':
                select = "SELECT * FROM %s WHERE MATCH('%s')"
                if page['public']:
                    select += ' AND public=1'
                if 'friends_only' in sfs and sfs['friends_only'] == False:
                    select += ' AND friends_only=0'
                select += ' LIMIT 1000'

                cursor = connections['sphinx'].cursor()
                cursor.execute(select % (settings.SPHINX_INDEX_NAME,
                                         search_query))
                res = __dictfetchall(cursor)
                uids = [ent['id'] for ent in res]
                entries = entries.filter(id__in=uids).select_related()
            else:  # db search
                if page['public']:
                    sfs['service__public'] = True
                sfs['content__icontains'] = search_query
                entries = entries.filter(**sfs).select_related()

            limit = offset + entries_on_page
            if offset >= entries_on_page:
                page['prevpage'] = page_number - 1
            if limit < entries.count():
                page['nextpage'] = page_number + 1

            entries = entries[offset:limit]
        except Exception:  # search can fail (bad query, backend down); fall back to empty results
            entries = []

        start = False

    # If not search, then normal query.
    else:
        entries = entries.filter(**fs)[0:entries_on_page + 1].select_related()
        num = len(entries)

        if 'exactentry' in page and num:
            page['title'] = truncatewords(entries[0].title, 7)

        # Time-based pagination.
        if num > entries_on_page:
            start = entries[num - 1].__getattribute__(entries_orderby)
            start = int(time.mktime(start.timetuple()))
        else:
            start = False

        entries = entries[0:entries_on_page]

        if num:
            crymax = entries[0].date_published.year
            crymin = entries[len(entries) - 1].date_published.year
            if crymin != crymax:
                page['copyright_years'] = '%s-%s' % (crymin, crymax)
            else:
                page['copyright_years'] = crymin

    # Build URL params for links.
    if len(urlparams):
        urlparams = '?' + reduce(lambda x, y: six.text_type(x) + '&' +
                                 six.text_type(y), urlparams, '')[1:] + '&'
    else:
        urlparams = '?'

    if len(entries):
        page['updated'] = entries[0].date_published
    else:
        page['updated'] = datetime.datetime.utcnow()
    page['urlparams'] = urlparams
    page['start'] = start
    page['after'] = after

    if hasattr(settings, 'STREAM_TITLE'):
        page_title = settings.STREAM_TITLE
    else:
        page_title = None

    if hasattr(settings, 'STREAM_DESCRIPTION'):
        page['description'] = settings.STREAM_DESCRIPTION

    # Set page theme.
    page['themes'] = settings.THEMES
    page['themes_more'] = len(settings.THEMES) > 1
    page['theme'] = common.get_theme(request)

    # Setup links.
    page['need_fbc'] = False
    for entry in entries:
        entry.only_for_friends = entry.friends_only

        if authed or friend:
            entry.friends_only = False
        elif entry.friends_only:
            page['need_fbc'] = True

        if not entry.friends_only:
            entry.gls_link = '%s/%s' % (urlresolvers.reverse('entry', args=[entry.id]),
                                        gls_slugify(truncatewords(entry.title, 7)))
        else:
            entry.gls_link = '%s/' % (
                urlresolvers.reverse('entry', args=[entry.id]))
            if 'title' in page:
                del page['title']

        entry.gls_absolute_link = '%s%s' % (page['site_url'], entry.gls_link)

    # Check single-entry URL
    if 'exactentry' in page:
        if len(entries):
            gls_link = entries[0].gls_link
            if gls_link != request.path:
                return HttpResponsePermanentRedirect(gls_link)
            page['canonical_link'] = urllib.parse.urljoin(
                settings.BASE_URL, gls_link)
        else:
            raise Http404

    if 'title' in page and page['title'] != '':
        if page_title:
            page['title'] += getattr(settings, 'STREAM_TITLE_SUFFIX',
                                     ' | ' + page_title)
    elif page_title:
        page['title'] = page_title

    # Pickup right output format and finish.
    format = request.GET.get('format', 'html')
    if format == 'atom':
        return render_to_response('stream.atom',
                                  {'entries': entries,
                                   'page': page},
                                  content_type='application/atom+xml')
    elif format == 'json':
        cb = request.GET.get('callback', False)
        return render_to_response('stream.json',
                                  {'entries': entries,
                                   'page': page,
                                   'callback': cb},
                                  content_type='application/json')
    elif format == 'html-pure' and request.is_ajax():
        # Check which entry is already favorite.
        if authed and page['ctx'] != 'favorites':
            ents = [entry.id for entry in entries]
            favs = Favorite.objects.filter(user__id=request.user.id,
                                           entry__id__in=ents)
            favs = [f.entry_id for f in favs]
            for entry in entries:
                if entry.id in favs:
                    entry.fav = True
                if entry.service.api in ('twitter', 'identica'):
                    entry.sms = True
        d = {
            'next': page['start'],
            'stream': strip_spaces_between_tags(
                render_to_string('stream-pure.html',
                                 {'entries': entries,
                                  'page': page,
                                  'authed': authed,
                                  'friend': friend})),
        }
        if 'nextpage' in page:
            d['next'] = page['nextpage']
        return HttpResponse(json.dumps(d), content_type='application/json')
    elif format != 'html':
        raise Http404
    else:
        # Check which entry is already favorite.
        if authed and page['ctx'] != 'favorites':
            ents = [entry.id for entry in entries]
            favs = Favorite.objects.filter(user__id=request.user.id,
                                           entry__id__in=ents)
            favs = [f.entry_id for f in favs]
            for entry in entries:
                if entry.id in favs:
                    entry.fav = True
                if entry.service.api in ('twitter', 'identica'):
                    entry.sms = True

        # Get lists.
        lists = List.objects.filter(
            user__id=request.user.id).order_by('name')

        # Get archives.
        if 'entry' in args:
            qs = {}
        else:
            qs = fs.copy()
            if year:
                del qs[entries_orderby + '__year']
            if month:
                del qs[entries_orderby + '__month']
            if day:
                del qs[entries_orderby + '__day']
        archs = Entry.objects.filter(**qs).dates('date_published',
                                                 'month', order='DESC')
        page['months12'] = [datetime.date(2010, x, 1) for x in range(1, 13)]

        # List available classes.
        fs = {}
        if not authed or page['ctx'] == 'public':
            fs['public'] = True
        _classes = (Service.objects.filter(**fs).order_by('id')
                    .values('api', 'cls'))
        classes = {}
        for item in _classes:
            if item['cls'] not in classes:
                classes[item['cls']] = item
        classes = list(classes.values())

        accept_lang = request.META.get('HTTP_ACCEPT_LANGUAGE', '').split(',')
        for i, lang in enumerate(accept_lang):
            accept_lang[i] = lang.split(';')[0]
        page['lang'] = accept_lang[0]

        request.user.fb_username = request.session.get('fb_username', '')
        request.user.fb_profile_url = request.session.get(
            'fb_profile_url', '')

        res = render_to_response('stream.html',
                                 {'classes': classes,
                                  'entries': entries,
                                  'lists': lists,
                                  'archives': archs,
                                  'page': page,
                                  'authed': authed,
                                  'friend': friend,
                                  'has_search': search_enable,
                                  'is_secure': request.is_secure(),
                                  'user': request.user})
        res['X-XRDS-Location'] = request.build_absolute_uri(
            urlresolvers.reverse('glifestream.gauth.views.xrds'))
        return res
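
The urlparams fold prepends '&' to every item and then slices off the leading one; '&'.join expresses the same thing directly. A quick equivalence check (mirroring the view's use of six.text_type):

import six
from functools import reduce

urlparams = ['class=photo', 'author=alice']
folded = '?' + reduce(lambda x, y: six.text_type(x) + '&' +
                      six.text_type(y), urlparams, '')[1:] + '&'
assert folded == '?' + '&'.join(urlparams) + '&'  # '?class=photo&author=alice&'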
Example n. 43
 def media(self):
     return reduce(add, (form.media for form in self.forms.values()))
Example n. 44
 def media(self):
     return reduce(add, (form.media for form in self.forms.values()))
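
Django's forms.Media defines __add__, so the fold merges the CSS/JS of every subform into one Media object. A sketch (file names invented):

from functools import reduce
from operator import add
from django import forms

medias = [forms.Media(js=['a.js']), forms.Media(js=['b.js'])]
combined = reduce(add, medias)  # renders one tag per asset, duplicates removed

Note that reduce with no initial value raises TypeError on an empty sequence; passing forms.Media() as the third argument would keep the property safe when self.forms is empty.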
Example n. 45
 def handle_merge(self, loader, conflicts):
     """
     Handles merging together conflicted migrations interactively,
     if it's safe; otherwise, advises on how to fix it.
     """
     if self.interactive:
         questioner = InteractiveMigrationQuestioner()
     else:
         questioner = MigrationQuestioner()
     for app_label, migration_names in conflicts.items():
         # Grab out the migrations in question, and work out their
         # common ancestor.
         merge_migrations = []
         for migration_name in migration_names:
             migration = loader.get_migration(app_label, migration_name)
             migration.ancestry = loader.graph.forwards_plan(
                 (app_label, migration_name))
             merge_migrations.append(migration)
         common_ancestor = None
         for level in zip(*[m.ancestry for m in merge_migrations]):
              # Folding with operator.eq breaks for 3+ branches (it compares a
              # bool to a tuple), so check every node against the first directly.
              if all(node == level[0] for node in level):
                  common_ancestor = level[0]
             else:
                 break
         if common_ancestor is None:
             raise ValueError("Could not find common ancestor of %s" %
                              migration_names)
         # Now work out the operations along each divergent branch
         for migration in merge_migrations:
             migration.branch = migration.ancestry[(
                 migration.ancestry.index(common_ancestor) + 1):]
             migration.merged_operations = []
             for node_app, node_name in migration.branch:
                 migration.merged_operations.extend(
                     loader.get_migration(node_app, node_name).operations)
         # In future, this could use some of the Optimizer code
         # (can_optimize_through) to automatically see if they're
         # mergeable. For now, we always just prompt the user.
         if self.verbosity > 0:
             self.stdout.write(
                 self.style.MIGRATE_HEADING("Merging %s" % app_label))
             for migration in merge_migrations:
                 self.stdout.write(
                     self.style.MIGRATE_LABEL("  Branch %s" %
                                              migration.name))
                 for operation in migration.merged_operations:
                     self.stdout.write("    - %s\n" % operation.describe())
         if questioner.ask_merge(app_label):
             # If they still want to merge it, then write out an empty
             # file depending on the migrations needing merging.
             numbers = [
                 MigrationAutodetector.parse_number(migration.name)
                 for migration in merge_migrations
             ]
             try:
                 biggest_number = max([x for x in numbers if x is not None])
             except ValueError:
                 biggest_number = 1
             subclass = type(
                 "Migration", (migrations.Migration, ), {
                     "dependencies": [(app_label, migration.name)
                                      for migration in merge_migrations],
                 })
             new_migration = subclass("%04i_merge" % (biggest_number + 1),
                                      app_label)
             writer = MigrationWriter(new_migration)
             with open(writer.path, "wb") as fh:
                 fh.write(writer.as_string())
             if self.verbosity > 0:
                 self.stdout.write("\nCreated new merge migration %s" %
                                   writer.path)
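
The common-ancestor scan checks whether every branch has the same node at a given depth. Folding with operator.eq only behaves for exactly two branches; with three, the first comparison yields a bool that is then compared to a tuple, which is why the generation check above uses all():

import operator
from functools import reduce

level = (('app', '0001'), ('app', '0001'), ('app', '0001'))
print(reduce(operator.eq, level))         # False: True == ('app', '0001')
print(all(n == level[0] for n in level))  # True: the intended check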
Example n. 46
 def render_data(self, state, review_request):
     """Return the rendered contents of the column."""
     groups = review_request.target_groups.all()
     return reduce(lambda a, d: a + d.name + ' ', groups, '')
Example n. 47
 def render_data(self, state, review_request):
     people = review_request.target_people.all()
     return reduce(lambda a, d: a + d.username + ' ', people, '')
Example n. 48
    def get_records(self, data):
        # TODO: validate `data`
        qs = self.get_queryset()

        # search
        search = data.get('search', [])
        filters = []
        for param in search:
            term = param['value']
            field = param['field']
            typ = param['type']
            operator = param['operator']
            if field == 'recid':
                field = 'pk'
            type_search = ""
            if operator == "contains":
                type_search = '__i' + operator
            elif operator == "in":
                type_search = '__' + operator
            elif operator == "between":
                type_search = '__range'
            elif operator == "begins":
                type_search = '__istartswith'
            elif operator == "ends":
                type_search = '__iendswith'
            elif operator == "is":
                type_search = "__exact"
            filters.append((Q(**{field + type_search: term})))
        if filters:
            searchLogic = data.get('searchLogic', 'AND')
            if searchLogic == "AND":
                searchLogic = and_
            else:
                searchLogic = or_
            qs = qs.filter(reduce(searchLogic, filters))

        # sort
        sort = data.get('sort', [])
        order = []
        for param in sort:
            field = param['field']
            if field == "recid":
                field = self.model._meta.pk.get_attname()
            direction = param['direction']
            if direction == 'desc':
                field = '-' + field
            order.append(field)
        if order:
            qs = qs.order_by(*order)

        # fields
        qs = qs.values('pk', *self.fields)

        # pagination
        page_size = data.get('limit', 1)
        start_index = data.get('offset', 0)
        paginator = Paginator(qs, page_size)
        num_page = (start_index // page_size) + 1  # floor division: Paginator.page() expects an int
        page = paginator.page(num_page)

        return self.success(data={
            "total": page.paginator.count,
            "records": list(page.object_list),
        })
Example n. 49
 def render_data(self, state, review_request):
     """Return the rendered contents of the column."""
     people = review_request.target_people.all()
     return reduce(lambda a, d: a + d.username + ' ', people, '')
Example n. 50
    def _alter_field(self, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=False):
        """Actually perform a "physical" (non-ManyToMany) field update."""

        # Has unique been removed?
        if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model, [old_field.column], unique=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
        # Drop any FK constraints, we'll remake them later
        fks_dropped = set()
        if old_field.rel and old_field.db_constraint:
            fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                    len(fk_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for fk_name in fk_names:
                fks_dropped.add((old_field.column,))
                self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
        # Drop incoming FK constraints if we're a primary key and things are going
        # to change.
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
                for fk_name in rel_fk_names:
                    self.execute(self._delete_constraint_sql(self.sql_delete_fk, rel.model, fk_name))
        # Removed an index?
        if (old_field.db_index and not new_field.db_index and
                not old_field.unique and not
                (not new_field.unique and old_field.unique)):
            # Find the index for this field
            index_names = self._constraint_names(model, [old_field.column], index=True)
            if strict and len(index_names) != 1:
                raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
                    len(index_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for index_name in index_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
        # Change check constraints?
        if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            self.execute(self.sql_rename_column % {
                "table": self.quote_name(model._meta.db_table),
                "old_column": self.quote_name(old_field.column),
                "new_column": self.quote_name(new_field.column),
                "type": new_type,
            })
        # Next, start accumulating actions to do
        actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(model._meta.db_table, new_field.column, new_type)
            actions.append(fragment)
            post_actions.extend(other_actions)
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        if old_default != new_default:
            if new_default is None:
                actions.append((
                    self.sql_alter_column_no_default % {
                        "column": self.quote_name(new_field.column),
                    },
                    [],
                ))
            else:
                if self.connection.features.requires_literal_defaults:
                    # Some databases can't take defaults as a parameter (oracle)
                    # If this is the case, the individual schema backend should
                    # implement prepare_default
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": self.prepare_default(new_default),
                        },
                        [],
                    ))
                else:
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": "%s",
                        },
                        [new_default],
                    ))
        # Nullability change?
        if old_field.null != new_field.null:
            if new_field.null:
                actions.append((
                    self.sql_alter_column_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
            else:
                actions.append((
                    self.sql_alter_column_not_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
        if actions:
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), reduce(operator.add, params))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # Added a unique?
        if not old_field.unique and new_field.unique:
            self.execute(self._create_unique_sql(model, [new_field.column]))
        # Added an index?
        if (not old_field.db_index and new_field.db_index and
                not new_field.unique and not
                (not old_field.unique and new_field.unique)):
            self.execute(self._create_index_sql(model, [new_field], suffix="_uniq"))
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(new_field.model._meta.get_all_related_objects())
        # Changed to become primary key?
        # Note that we don't detect unsetting of a PK, as we assume another field
        # will always come along and replace it.
        if not old_field.primary_key and new_field.primary_key:
            # First, drop the old PK
            constraint_names = self._constraint_names(model, primary_key=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                    len(constraint_names),
                    model._meta.db_table,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_pk, model, constraint_name))
            # Make the new one
            self.execute(
                self.sql_create_pk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_pk")),
                    "columns": self.quote_name(new_field.column),
                }
            )
            # Update all referencing columns
            rels_to_update.extend(new_field.model._meta.get_all_related_objects())
        # Handle our type alters on the other end of rels from the PK stuff above
        for rel in rels_to_update:
            rel_db_params = rel.field.db_parameters(connection=self.connection)
            rel_type = rel_db_params['type']
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(rel.model._meta.db_table),
                    "changes": self.sql_alter_column_type % {
                        "column": self.quote_name(rel.field.column),
                        "type": rel_type,
                    }
                }
            )
        # Does it have a foreign key?
        if new_field.rel and \
           (fks_dropped or (old_field.rel and not old_field.db_constraint)) and \
           new_field.db_constraint:
            self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
        # Rebuild FKs that pointed to us if we previously had to drop them
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                self.execute(self._create_fk_sql(rel.model, rel.field, "_fk"))
        # Does it have check constraints we need to add?
        if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
            self.execute(
                self.sql_create_check % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_check")),
                    "column": self.quote_name(new_field.column),
                    "check": new_db_params['check'],
                }
            )
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        if not self.skip_default(new_field) and new_field.default is not None:
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": self.sql_alter_column_no_default % {
                    "column": self.quote_name(new_field.column),
                }
            }
            self.execute(sql)
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()
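
The combined-alter branch above transposes the accumulated (sql, params) pairs with zip and flattens the parameter lists with reduce(operator.add, ...). A minimal, self-contained sketch of that fold (the fragments below are invented for illustration):

import operator
from functools import reduce

# Hypothetical per-column fragments, mirroring the actions list built above.
actions = [
    ('ALTER COLUMN "name" TYPE varchar(100)', []),
    ('ALTER COLUMN "name" SET DEFAULT %s', ['unknown']),
    ('ALTER COLUMN "name" DROP NOT NULL', []),
]

# zip(*actions) transposes the pairs into (sql_fragments, param_lists);
# reduce(operator.add, ...) concatenates the parameter lists in order.
sql, params = tuple(zip(*actions))
combined = [(", ".join(sql), reduce(operator.add, params))]

print(combined[0][0])
# ALTER COLUMN "name" TYPE varchar(100), ALTER COLUMN "name" SET DEFAULT %s, ALTER COLUMN "name" DROP NOT NULL
print(combined[0][1])
# ['unknown']

Folding the fragments this way lets backends whose features declare supports_combined_alters (e.g. PostgreSQL) issue one ALTER TABLE statement instead of one per change.
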
Example no. 51
0
def append_lookup_keys(model, fields):
    return moves.reduce(set.union, (append_lookup_key(model, field) for field in fields), set())
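
Here moves.reduce is the six.moves alias for functools.reduce; the call unions the per-field lookup-key sets, with set() as the initializer so an empty fields iterable yields an empty set rather than an error. A minimal sketch with a stand-in append_lookup_key (hypothetical, for illustration only):

from functools import reduce  # six.moves.reduce is this same function on Python 3

def append_lookup_key(model, field):
    # Stand-in for the real helper: pretend each field expands to a set
    # of lookup keys such as 'name' and 'name__icontains'.
    return {field, '%s__icontains' % field}

def append_lookup_keys(model, fields):
    # Fold the per-field sets together with set.union.
    return reduce(set.union, (append_lookup_key(model, f) for f in fields), set())

print(sorted(append_lookup_keys(None, ['name', 'email'])))
# ['email', 'email__icontains', 'name', 'name__icontains']
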
Example no. 52
0
    def alter_field(self, model, old_field, new_field, strict=False):
        """
        Allows a field's type, uniqueness, nullability, default, column,
        constraints etc. to be modified.
        Requires a copy of the old field as well so we can only perform
        changes that are required.
        If strict is true, raises errors if the old column does not match old_field precisely.
        """
        # Ensure this field is even column-based
        old_db_params = old_field.db_parameters(connection=self.connection)
        old_type = old_db_params['type']
        new_db_params = new_field.db_parameters(connection=self.connection)
        new_type = new_db_params['type']
        if old_type is None and new_type is None \
            and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created
                and new_field.rel.through._meta.auto_created):
            return self._alter_many_to_many(model, old_field, new_field, strict)
        elif old_type is None or new_type is None:
            raise ValueError(
                "Cannot alter field %s into %s - they are not compatible types "
                "(probably means only one is an M2M with implicit through model)" % (
                    old_field,
                    new_field,
                )
            )
        # Has unique been removed?
        if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model, [old_field.column], unique=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(*self._delete_db_constraint_sql(model, constraint_name, constraint_type='unique'))
        # Removed an index?
        if old_field.db_index and not new_field.db_index and not old_field.unique \
                and not (not new_field.unique and old_field.unique):
            # Find the index for this field
            index_names = self._constraint_names(model, [old_field.column], index=True)
            if strict and len(index_names) != 1:
                raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
                    len(index_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for index_name in index_names:
                self.execute(*self._delete_db_constraint_sql(model, index_name, constraint_type='index'))
        # Drop any FK constraints, we'll remake them later
        if old_field.rel:
            fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                    len(fk_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for fk_name in fk_names:
                self.execute(*self._delete_db_constraint_sql(model, fk_name, constraint_type='fk'))
        # Drop incoming FK constraints if we're a primary key and things are going
        # to change.
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
                for fk_name in rel_fk_names:
                    self.execute(*self._delete_db_constraint_sql(rel.model, fk_name, constraint_type='fk'))
        # Change check constraints?
        if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(*self._delete_db_constraint_sql(model, constraint_name, constraint_type='check'))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            self.rename_db_column(model, old_field.column, new_field.column, new_type)
        # Next, start accumulating actions to do
        actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            type_actions = self._alter_db_column_sql(model, new_field.column, 'type',
                values={
                    'type': new_type,
                    'old_type': old_type,
                },
                fragment=True,
            )
            actions.extend(type_actions[0])
            post_actions.extend(type_actions[1])
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        if old_default != new_default:
            if new_default is None:
                default_actions = self._alter_db_column_sql(model, new_field.column, 'no_default',
                    fragment=True)
            else:
                default_sql, default_params = self.prepare_default(new_default)
                default_actions = self._alter_db_column_sql(model, new_field.column, 'default',
                    values={'default': default_sql}, fragment=True, params=default_params)
            actions.extend(default_actions[0])
            post_actions.extend(default_actions[1])
        # Nullability change?
        if old_field.null != new_field.null:
            alteration = 'null' if new_field.null else 'not_null'
            null_actions = self._alter_db_column_sql(model, new_field.column, alteration,
                values={'type': new_type}, fragment=True)
            actions.extend(null_actions[0])
            post_actions.extend(null_actions[1])
        if actions:
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), reduce(operator.add, params))]
            # Apply those actions
            for sql, params in actions:
                if sql:
                    self.execute(
                        self.sql_alter_column % {
                            "table": self.quote_name(model._meta.db_table),
                            "changes": sql,
                        },
                        params,
                    )
        if post_actions:
            for sql, params in post_actions:
                if sql:
                    self.execute(sql, params)
        # Added a unique?
        if not old_field.unique and new_field.unique:
            self.execute(*self._create_db_constraint_sql(model, new_field.column, 'unique'))
        # Added an index?
        if not old_field.db_index and new_field.db_index and not new_field.unique \
                and not (not old_field.unique and new_field.unique):
            self.execute(*self._create_db_constraint_sql(model, new_field.column, 'index'))
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(new_field.model._meta.get_all_related_objects())
        # Changed to become primary key?
        # Note that we don't detect unsetting of a PK, as we assume another field
        # will always come along and replace it.
        if not old_field.primary_key and new_field.primary_key:
            # First, drop the old PK
            constraint_names = self._constraint_names(model, primary_key=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                    len(constraint_names),
                    model._meta.db_table,
                ))
            for constraint_name in constraint_names:
                self.execute(*self._delete_db_constraint_sql(model, constraint_name, constraint_type='pk'))
            # Make the new one
            self.execute(*self._create_db_constraint_sql(model, new_field.column, 'pk'))
            # Update all referencing columns
            rels_to_update.extend(new_field.model._meta.get_all_related_objects())
        # Handle our type alters on the other end of rels from the PK stuff above
        for rel in rels_to_update:
            type_actions = self._alter_db_column_sql(rel.model, rel.field.column, 'type',
                values={
                    'type': new_type,
                    'old_type': old_type,
                },
            )

        # Does it have a foreign key?
        if new_field.rel:
            self.execute(*self._create_db_constraint_sql(model, new_field.column, 'fk', values={
                "to_table": self.quote_name(new_field.rel.to._meta.db_table),
                "to_column": self.quote_name(new_field.rel.get_related_field().column),
            }))
        # Rebuild FKs that pointed to us if we previously had to drop them
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                self.execute(*self._create_db_constraint_sql(rel.model, rel.field.column, 'fk', values={
                    "to_table": self.quote_name(model._meta.db_table),
                    "to_column": self.quote_name(new_field.column),
                }))
        # Does it have check constraints we need to add?
        if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
            self.execute(*self._create_db_constraint_sql(model, new_field.column, 'check', values={
                'check': new_db_params['check'],
            }))
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()
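
This variant routes every change through a single _alter_db_column_sql helper instead of per-operation SQL templates. A rough sketch of the contract it appears to follow, assuming each call returns an (actions, post_actions) pair of (sql, params) lists; the helper below is a hypothetical stand-in, not the backend's real implementation:

def alter_db_column_sql(column, alteration, values=None, params=None):
    # Hypothetical stand-in: render one template per alteration kind and
    # return (actions, post_actions), each a list of (sql, params) pairs.
    # fragment=True in the snippet above means "return the pieces rather
    # than executing them"; post-actions run as standalone statements.
    values = dict(values or {}, column=column)
    templates = {
        'type': 'ALTER COLUMN "%(column)s" TYPE %(type)s',
        'null': 'ALTER COLUMN "%(column)s" DROP NOT NULL',
        'not_null': 'ALTER COLUMN "%(column)s" SET NOT NULL',
        'default': 'ALTER COLUMN "%(column)s" SET DEFAULT %(default)s',
        'no_default': 'ALTER COLUMN "%(column)s" DROP DEFAULT',
    }
    return [(templates[alteration] % values, params or [])], []

actions, post_actions = [], []
for fragments, posts in (alter_db_column_sql('age', 'type', {'type': 'integer'}),
                         alter_db_column_sql('age', 'not_null')):
    actions.extend(fragments)
    post_actions.extend(posts)

print(actions)
# [('ALTER COLUMN "age" TYPE integer', []), ('ALTER COLUMN "age" SET NOT NULL', [])]
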
Example no. 53
0
File: schema.py Project: anshp/new
    def _alter_field(self,
                     model,
                     old_field,
                     new_field,
                     old_type,
                     new_type,
                     old_db_params,
                     new_db_params,
                     strict=False):
        """Actually perform a "physical" (non-ManyToMany) field update."""

        # Has unique been removed?
        if old_field.unique and (not new_field.unique or
                                 (not old_field.primary_key
                                  and new_field.primary_key)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model,
                                                      [old_field.column],
                                                      unique=True)
            if strict and len(constraint_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of unique constraints for %s.%s" %
                    (
                        len(constraint_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for constraint_name in constraint_names:
                self.execute(
                    self._delete_constraint_sql(self.sql_delete_unique, model,
                                                constraint_name))
        # Drop any FK constraints, we'll remake them later
        fks_dropped = set()
        if old_field.rel and old_field.db_constraint:
            fk_names = self._constraint_names(model, [old_field.column],
                                              foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of foreign key constraints for %s.%s"
                    % (
                        len(fk_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for fk_name in fk_names:
                fks_dropped.add((old_field.column, ))
                self.execute(
                    self._delete_constraint_sql(self.sql_delete_fk, model,
                                                fk_name))
        # Drop incoming FK constraints if we're a primary key and things are going
        # to change.
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                rel_fk_names = self._constraint_names(rel.model,
                                                      [rel.field.column],
                                                      foreign_key=True)
                for fk_name in rel_fk_names:
                    self.execute(
                        self._delete_constraint_sql(self.sql_delete_fk,
                                                    rel.model, fk_name))
        # Removed an index?
        if old_field.db_index and not new_field.db_index and not old_field.unique and not (
                not new_field.unique and old_field.unique):
            # Find the index for this field
            index_names = self._constraint_names(model, [old_field.column],
                                                 index=True)
            if strict and len(index_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of indexes for %s.%s" % (
                        len(index_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for index_name in index_names:
                self.execute(
                    self._delete_constraint_sql(self.sql_delete_index, model,
                                                index_name))
        # Change check constraints?
        if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
            constraint_names = self._constraint_names(model,
                                                      [old_field.column],
                                                      check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of check constraints for %s.%s" %
                    (
                        len(constraint_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for constraint_name in constraint_names:
                self.execute(
                    self._delete_constraint_sql(self.sql_delete_check, model,
                                                constraint_name))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            self.execute(
                self.sql_rename_column % {
                    "table": self.quote_name(model._meta.db_table),
                    "old_column": self.quote_name(old_field.column),
                    "new_column": self.quote_name(new_field.column),
                    "type": new_type,
                })
        # Next, start accumulating actions to do
        actions = []
        null_actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(
                model._meta.db_table, new_field.column, new_type)
            actions.append(fragment)
            post_actions.extend(other_actions)
        # When changing a column NULL constraint to NOT NULL with a given
        # default value, we need to perform 4 steps:
        #  1. Add a default for new incoming writes
        #  2. Update existing NULL rows with new default
        #  3. Replace NULL constraint with NOT NULL
        #  4. Drop the default again.
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        if old_default != new_default:
            if new_default is None:
                actions.append((
                    self.sql_alter_column_no_default % {
                        "column": self.quote_name(new_field.column),
                    },
                    [],
                ))
            else:
                if self.connection.features.requires_literal_defaults:
                    # Some databases can't take defaults as a parameter (oracle)
                    # If this is the case, the individual schema backend should
                    # implement prepare_default
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": self.prepare_default(new_default),
                        },
                        [],
                    ))
                else:
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": "%s",
                        },
                        [new_default],
                    ))
        # Nullability change?
        if old_field.null != new_field.null:
            if new_field.null:
                null_actions.append((
                    self.sql_alter_column_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
            else:
                null_actions.append((
                    self.sql_alter_column_not_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
        # Only if we have a default and there is a change from NULL to NOT NULL
        four_way_default_alteration = (new_field.has_default() and
                                       (old_field.null and not new_field.null))
        if actions or null_actions:
            if not four_way_default_alteration:
                # If we don't have to do a 4-way default alteration we can
                # directly run a (NOT) NULL alteration
                actions = actions + null_actions
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), reduce(operator.add, params))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
            if four_way_default_alteration:
                # Update existing rows with default value
                self.execute(
                    self.sql_update_with_default % {
                        "table": self.quote_name(model._meta.db_table),
                        "column": self.quote_name(new_field.column),
                        "default": "%s",
                    },
                    [new_default],
                )
                # Since we didn't run a NOT NULL change before we need to do it
                # now
                for sql, params in null_actions:
                    self.execute(
                        self.sql_alter_column % {
                            "table": self.quote_name(model._meta.db_table),
                            "changes": sql,
                        },
                        params,
                    )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # Added a unique?
        if not old_field.unique and new_field.unique:
            self.execute(self._create_unique_sql(model, [new_field.column]))
        # Added an index?
        if not old_field.db_index and new_field.db_index and not new_field.unique and not (
                not old_field.unique and new_field.unique):
            self.execute(
                self.sql_create_index % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                    "columns": self.quote_name(new_field.column),
                    "extra": "",
                })
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(
                new_field.model._meta.get_all_related_objects())
        # Changed to become primary key?
        # Note that we don't detect unsetting of a PK, as we assume another field
        # will always come along and replace it.
        if not old_field.primary_key and new_field.primary_key:
            # First, drop the old PK
            constraint_names = self._constraint_names(model, primary_key=True)
            if strict and len(constraint_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of PK constraints for %s" % (
                        len(constraint_names),
                        model._meta.db_table,
                    ))
            for constraint_name in constraint_names:
                self.execute(
                    self._delete_constraint_sql(self.sql_delete_pk, model,
                                                constraint_name))
            # Make the new one
            self.execute(
                self.sql_create_pk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(
                        self._create_index_name(model, [new_field.column], suffix="_pk")),
                    "columns": self.quote_name(new_field.column),
                })
            # Update all referencing columns
            rels_to_update.extend(
                new_field.model._meta.get_all_related_objects())
        # Handle our type alters on the other end of rels from the PK stuff above
        for rel in rels_to_update:
            rel_db_params = rel.field.db_parameters(connection=self.connection)
            rel_type = rel_db_params['type']
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(rel.model._meta.db_table),
                    "changes": self.sql_alter_column_type % {
                        "column": self.quote_name(rel.field.column),
                        "type": rel_type,
                    }
                })
        # Does it have a foreign key?
        if new_field.rel and \
           (fks_dropped or (old_field.rel and not old_field.db_constraint)) and \
           new_field.db_constraint:
            self.execute(
                self._create_fk_sql(model, new_field,
                                    "_fk_%(to_table)s_%(to_column)s"))
        # Rebuild FKs that pointed to us if we previously had to drop them
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                self.execute(self._create_fk_sql(rel.model, rel.field, "_fk"))
        # Does it have check constraints we need to add?
        if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
            self.execute(
                self.sql_create_check % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(
                        self._create_index_name(model, [new_field.column], suffix="_check")),
                    "column": self.quote_name(new_field.column),
                    "check": new_db_params['check'],
                })
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        if not self.skip_default(new_field) and new_field.default is not None:
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": self.sql_alter_column_no_default % {
                    "column": self.quote_name(new_field.column),
                }
            }
            self.execute(sql)
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()
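
The four-step NULL-to-NOT-NULL dance described in the comments above maps onto plain SQL roughly as follows (illustrative statements only; the table and column names are invented and the exact syntax varies by backend):

# Illustrative only: the four steps for NULL -> NOT NULL with a default,
# written out as the SQL the snippet above effectively emits.
steps = [
    'ALTER TABLE "app_person" ALTER COLUMN "age" SET DEFAULT 0',  # 1. default for new writes
    'UPDATE "app_person" SET "age" = 0 WHERE "age" IS NULL',      # 2. backfill existing NULL rows
    'ALTER TABLE "app_person" ALTER COLUMN "age" SET NOT NULL',   # 3. tighten the constraint
    'ALTER TABLE "app_person" ALTER COLUMN "age" DROP DEFAULT',   # 4. drop the default again
]
for sql in steps:
    print(sql)
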
Example no. 54
0
 def alter_field(self, model, old_field, new_field, strict=False):
     """
     Allows a field's type, uniqueness, nullability, default, column,
     constraints etc. to be modified.
     Requires a copy of the old field as well so we can only perform
     changes that are required.
     If strict is true, raises errors if the old column does not match old_field precisely.
     """
     # Ensure this field is even column-based
     old_db_params = old_field.db_parameters(connection=self.connection)
     old_type = old_db_params['type']
     new_db_params = new_field.db_parameters(connection=self.connection)
     new_type = new_db_params['type']
     if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created):
         return self._alter_many_to_many(model, old_field, new_field, strict)
     elif old_type is None or new_type is None:
         raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % (
             old_field,
             new_field,
         ))
     # Has unique been removed?
     if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
         # Find the unique constraint for this field
         constraint_names = self._constraint_names(model, [old_field.column], unique=True)
         if strict and len(constraint_names) != 1:
             raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                 len(constraint_names),
                 model._meta.db_table,
                 old_field.column,
             ))
         for constraint_name in constraint_names:
             self.execute(
                 self.sql_delete_unique % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": constraint_name,
                 },
             )
     # Removed an index?
     if old_field.db_index and not new_field.db_index and not old_field.unique and not (not new_field.unique and old_field.unique):
         # Find the index for this field
         index_names = self._constraint_names(model, [old_field.column], index=True)
         if strict and len(index_names) != 1:
             raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
                 len(index_names),
                 model._meta.db_table,
                 old_field.column,
             ))
         for index_name in index_names:
             self.execute(
                 self.sql_delete_index % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": index_name,
                 }
             )
     # Drop any FK constraints, we'll remake them later
     if old_field.rel:
         fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
         if strict and len(fk_names) != 1:
             raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                 len(fk_names),
                 model._meta.db_table,
                 old_field.column,
             ))
         for fk_name in fk_names:
             self.execute(
                 self.sql_delete_fk % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": fk_name,
                 }
             )
     # Change check constraints?
     if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
         constraint_names = self._constraint_names(model, [old_field.column], check=True)
         if strict and len(constraint_names) != 1:
             raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                 len(constraint_names),
                 model._meta.db_table,
                 old_field.column,
             ))
         for constraint_name in constraint_names:
             self.execute(
                 self.sql_delete_check % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": constraint_name,
                 }
             )
     # Have they renamed the column?
     if old_field.column != new_field.column:
         self.execute(self.sql_rename_column % {
             "table": self.quote_name(model._meta.db_table),
             "old_column": self.quote_name(old_field.column),
             "new_column": self.quote_name(new_field.column),
             "type": new_type,
         })
     # Next, start accumulating actions to do
     actions = []
     # Type change?
     if old_type != new_type:
         actions.append((
             self.sql_alter_column_type % {
                 "column": self.quote_name(new_field.column),
                 "type": new_type,
             },
             [],
         ))
     # Default change?
     old_default = self.effective_default(old_field)
     new_default = self.effective_default(new_field)
     if old_default != new_default:
         if new_default is None:
             actions.append((
                 self.sql_alter_column_no_default % {
                     "column": self.quote_name(new_field.column),
                 },
                 [],
             ))
         else:
             if self.connection.features.requires_literal_defaults:
                 # Some databases can't take defaults as a parameter (oracle)
                 # If this is the case, the individual schema backend should
                 # implement prepare_default
                 actions.append((
                     self.sql_alter_column_default % {
                         "column": self.quote_name(new_field.column),
                         "default": self.prepare_default(new_default),
                     },
                     [],
                 ))
             else:
                 actions.append((
                     self.sql_alter_column_default % {
                         "column": self.quote_name(new_field.column),
                         "default": "%s",
                     },
                     [new_default],
                 ))
     # Nullability change?
     if old_field.null != new_field.null:
         if new_field.null:
             actions.append((
                 self.sql_alter_column_null % {
                     "column": self.quote_name(new_field.column),
                     "type": new_type,
                 },
                 [],
             ))
         else:
             actions.append((
                 self.sql_alter_column_not_null % {
                     "column": self.quote_name(new_field.column),
                     "type": new_type,
                 },
                 [],
             ))
     if actions:
         # Combine actions together if we can (e.g. postgres)
         if self.connection.features.supports_combined_alters:
             sql, params = tuple(zip(*actions))
             actions = [(", ".join(sql), reduce(operator.add, params))]
         # Apply those actions
         for sql, params in actions:
             self.execute(
                 self.sql_alter_column % {
                     "table": self.quote_name(model._meta.db_table),
                     "changes": sql,
                 },
                 params,
             )
     # Added a unique?
     if not old_field.unique and new_field.unique:
         self.execute(
             self.sql_create_unique % {
                 "table": self.quote_name(model._meta.db_table),
                 "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                 "columns": self.quote_name(new_field.column),
             }
         )
     # Added an index?
     if not old_field.db_index and new_field.db_index and not new_field.unique and not (not old_field.unique and new_field.unique):
         self.execute(
             self.sql_create_index % {
                 "table": self.quote_name(model._meta.db_table),
                 "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                 "columns": self.quote_name(new_field.column),
                 "extra": "",
             }
         )
     # Changed to become primary key?
     # Note that we don't detect unsetting of a PK, as we assume another field
     # will always come along and replace it.
     if not old_field.primary_key and new_field.primary_key:
         # First, drop the old PK
         constraint_names = self._constraint_names(model, primary_key=True)
         if strict and len(constraint_names) != 1:
             raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                 len(constraint_names),
                 model._meta.db_table,
             ))
         for constraint_name in constraint_names:
             self.execute(
                 self.sql_delete_pk % {
                     "table": self.quote_name(model._meta.db_table),
                     "name": constraint_name,
                 },
             )
         # Make the new one
         self.execute(
             self.sql_create_pk % {
                 "table": self.quote_name(model._meta.db_table),
                 "name": self._create_index_name(model, [new_field.column], suffix="_pk"),
                 "columns": self.quote_name(new_field.column),
             }
         )
     # Does it have a foreign key?
     if new_field.rel:
         self.execute(
             self.sql_create_fk % {
                 "table": self.quote_name(model._meta.db_table),
                 "name": self._create_index_name(model, [new_field.column], suffix="_fk"),
                 "column": self.quote_name(new_field.column),
                 "to_table": self.quote_name(new_field.rel.to._meta.db_table),
                 "to_column": self.quote_name(new_field.rel.get_related_field().column),
             }
         )
     # Does it have check constraints we need to add?
     if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
         self.execute(
             self.sql_create_check % {
                 "table": self.quote_name(model._meta.db_table),
                 "name": self._create_index_name(model, [new_field.column], suffix="_check"),
                 "column": self.quote_name(new_field.column),
                 "check": new_db_params['check'],
             }
         )
     # Reset connection if required
     if self.connection.features.connection_persists_old_columns:
         self.connection.close()
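
As an aside, reduce(operator.add, params) rebuilds the accumulated list on every step, so it is quadratic in the total number of parameters; itertools.chain.from_iterable gives the same result in a single linear pass. For the handful of fragments one ALTER produces the difference is academic, but the equivalence is easy to check:

import itertools
import operator
from functools import reduce

params = (['a'], [], ['b', 'c'])

# Both expressions concatenate the parameter lists in order.
assert reduce(operator.add, params) == list(itertools.chain.from_iterable(params)) == ['a', 'b', 'c']
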
Example no. 55
0
 def render_data(self, state, review_request):
     """Return the rendered contents of the column."""
     people = review_request.target_people.all()
     return reduce(lambda a, d: a + d.username + ' ', people, '')
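
Folding strings with reduce and + copies the accumulator on every iteration; str.join over a generator is the idiomatic equivalent (both leave a trailing space here). A quick check with a minimal stand-in for the user objects:

from functools import reduce

class User:
    def __init__(self, username):
        self.username = username

people = [User('alice'), User('bob')]

via_reduce = reduce(lambda a, d: a + d.username + ' ', people, '')
via_join = ''.join(u.username + ' ' for u in people)
assert via_reduce == via_join == 'alice bob '
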
Example no. 56
0
    def _alter_field(self,
                     model,
                     old_field,
                     new_field,
                     old_type,
                     new_type,
                     old_db_params,
                     new_db_params,
                     strict=False):
        """Actually perform a "physical" (non-ManyToMany) field update."""

        # Has unique been removed?
        if old_field.unique and (not new_field.unique or
                                 (not old_field.primary_key
                                  and new_field.primary_key)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model,
                                                      [old_field.column],
                                                      unique=True)
            if strict and len(constraint_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of unique constraints for %s.%s" %
                    (
                        len(constraint_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for constraint_name in constraint_names:
                self.execute(
                    self.sql_delete_unique % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": constraint_name,
                    })
        # Removed an index?
        if old_field.db_index and not new_field.db_index and not old_field.unique and not (
                not new_field.unique and old_field.unique):
            # Find the index for this field
            index_names = self._constraint_names(model, [old_field.column],
                                                 index=True)
            if strict and len(index_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of indexes for %s.%s" % (
                        len(index_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for index_name in index_names:
                self.execute(
                    self.sql_delete_index % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": index_name,
                    })
        # Drop any FK constraints, we'll remake them later
        if old_field.rel:
            fk_names = self._constraint_names(model, [old_field.column],
                                              foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of foreign key constraints for %s.%s"
                    % (
                        len(fk_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for fk_name in fk_names:
                self.execute(
                    self.sql_delete_fk % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": fk_name,
                    })
        # Drop incoming FK constraints if we're a primary key and things are going
        # to change.
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                rel_fk_names = self._constraint_names(rel.model,
                                                      [rel.field.column],
                                                      foreign_key=True)
                for fk_name in rel_fk_names:
                    self.execute(
                        self.sql_delete_fk % {
                            "table": self.quote_name(rel.model._meta.db_table),
                            "name": fk_name,
                        })
        # Change check constraints?
        if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
            constraint_names = self._constraint_names(model,
                                                      [old_field.column],
                                                      check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of check constraints for %s.%s" %
                    (
                        len(constraint_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
            for constraint_name in constraint_names:
                self.execute(
                    self.sql_delete_check % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": constraint_name,
                    })
        # Have they renamed the column?
        if old_field.column != new_field.column:
            self.execute(
                self.sql_rename_column % {
                    "table": self.quote_name(model._meta.db_table),
                    "old_column": self.quote_name(old_field.column),
                    "new_column": self.quote_name(new_field.column),
                    "type": new_type,
                })
        # Next, start accumulating actions to do
        actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(
                model._meta.db_table, new_field.column, new_type)
            actions.append(fragment)
            post_actions.extend(other_actions)
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        if old_default != new_default:
            if new_default is None:
                actions.append((
                    self.sql_alter_column_no_default % {
                        "column": self.quote_name(new_field.column),
                    },
                    [],
                ))
            else:
                if self.connection.features.requires_literal_defaults:
                    # Some databases can't take defaults as a parameter (oracle)
                    # If this is the case, the individual schema backend should
                    # implement prepare_default
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": self.prepare_default(new_default),
                        },
                        [],
                    ))
                else:
                    actions.append((
                        self.sql_alter_column_default % {
                            "column": self.quote_name(new_field.column),
                            "default": "%s",
                        },
                        [new_default],
                    ))
        # Nullability change?
        if old_field.null != new_field.null:
            if new_field.null:
                actions.append((
                    self.sql_alter_column_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
            else:
                actions.append((
                    self.sql_alter_column_not_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
        if actions:
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), reduce(operator.add, params))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # Added a unique?
        if not old_field.unique and new_field.unique:
            self.execute(
                self.sql_create_unique % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                    "columns": self.quote_name(new_field.column),
                })
        # Added an index?
        if not old_field.db_index and new_field.db_index and not new_field.unique and not (
                not old_field.unique and new_field.unique):
            self.execute(
                self.sql_create_index % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
                    "columns": self.quote_name(new_field.column),
                    "extra": "",
                })
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(
                new_field.model._meta.get_all_related_objects())
        # Changed to become primary key?
        # Note that we don't detect unsetting of a PK, as we assume another field
        # will always come along and replace it.
        if not old_field.primary_key and new_field.primary_key:
            # First, drop the old PK
            constraint_names = self._constraint_names(model, primary_key=True)
            if strict and len(constraint_names) != 1:
                raise ValueError(
                    "Found wrong number (%s) of PK constraints for %s" % (
                        len(constraint_names),
                        model._meta.db_table,
                    ))
            for constraint_name in constraint_names:
                self.execute(
                    self.sql_delete_pk % {
                        "table": self.quote_name(model._meta.db_table),
                        "name": constraint_name,
                    })
            # Make the new one
            self.execute(
                self.sql_create_pk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_pk"),
                    "columns": self.quote_name(new_field.column),
                })
            # Update all referencing columns
            rels_to_update.extend(
                new_field.model._meta.get_all_related_objects())
        # Handle our type alters on the other end of rels from the PK stuff above
        for rel in rels_to_update:
            rel_db_params = rel.field.db_parameters(connection=self.connection)
            rel_type = rel_db_params['type']
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(rel.model._meta.db_table),
                    "changes": self.sql_alter_column_type % {
                        "column": self.quote_name(rel.field.column),
                        "type": rel_type,
                    }
                })
        # Does it have a foreign key?
        if new_field.rel:
            self.execute(
                self.sql_create_fk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_fk"),
                    "column": self.quote_name(new_field.column),
                    "to_table": self.quote_name(new_field.rel.to._meta.db_table),
                    "to_column": self.quote_name(new_field.rel.get_related_field().column),
                })
        # Rebuild FKs that pointed to us if we previously had to drop them
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.get_all_related_objects():
                self.execute(
                    self.sql_create_fk % {
                        "table": self.quote_name(rel.model._meta.db_table),
                        "name": self._create_index_name(rel.model, [rel.field.column], suffix="_fk"),
                        "column": self.quote_name(rel.field.column),
                        "to_table": self.quote_name(model._meta.db_table),
                        "to_column": self.quote_name(new_field.column),
                    })
        # Does it have check constraints we need to add?
        if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
            self.execute(
                self.sql_create_check % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self._create_index_name(model, [new_field.column], suffix="_check"),
                    "column": self.quote_name(new_field.column),
                    "check": new_db_params['check'],
                })
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()
Example no. 57
0
def append_lookup_keys(model, fields):
    return moves.reduce(set.union,
                        (append_lookup_key(model, field) for field in fields),
                        set())
Example no. 58
0
 def render_data(self, review_request):
     groups = review_request.target_groups.all()
     return reduce(lambda a, d: a + d.name + ' ', groups, '')