def del_cfg_not_in_groups(store):
    """Delete every Config row whose var_group is not one of the
    recognized groups (node, notification, private), logging each
    removed row.

    :param store: the Storm store on which to perform the queries.
    """
    where = And(Not(Config.var_group == u'node'),
                Not(Config.var_group == u'notification'),
                Not(Config.var_group == u'private'))
    res = store.find(Config, where)
    for c in res:
        log.info("Removing extra Config <%s>" % c)
    # Reuse the result set instead of re-issuing the identical query:
    # Storm's ResultSet.remove() deletes by the find() criteria, so a
    # second store.find(Config, where) was pure duplication.
    res.remove()
def db_fix_fields_attrs(store): ''' Ensures that the current store and the field_attrs.json file correspond. The content of the field_attrs dict is used to add and remove all of the excepted forms of field_attrs for FieldAttrs in the db. ''' # Load the field attributes descriptors field_attrs = {} with file(GLSettings.field_attrs_file, 'r') as f: json_string = f.read() field_attrs = json.loads(json_string) std_lst = [ 'inputbox', 'textarea', 'multichoice', 'checkbox', 'tos', 'date' ] for field_type, attrs_dict in field_attrs.iteritems(): attrs_to_keep_for_type = attrs_dict.keys() if field_type in std_lst: # Ensure that the standard field attrs do not have extra attr rows res = store.find( models.FieldAttr, Not(In(models.FieldAttr.name, attrs_to_keep_for_type)), models.FieldAttr.field_id == models.Field.id, models.Field.type == field_type, models.Field.key == unicode('')) else: # Look for dropped attrs in non-standard field_groups like whistleblower_identity res = store.find( models.FieldAttr, Not(In(models.FieldAttr.name, attrs_to_keep_for_type)), models.FieldAttr.field_id == models.Field.id, models.Field.key == field_type) count = res.count() if count: log.debug("Removing %d attributes from fields of type %s" % (count, field_type)) for r in res: store.remove(r) # Add keys to the db that have been added to field_attrs for field in store.find(models.Field): typ = field.type if field.key == '' else field.key attrs = field_attrs.get(typ, {}) for attr_name, attr_dict in attrs.iteritems(): if not store.find( models.FieldAttr, And(models.FieldAttr.field_id == field.id, models.FieldAttr.name == attr_name)).one(): log.debug("Adding new field attr %s.%s" % (typ, attr_name)) attr_dict['name'] = attr_name field.attrs.add( models.db_forge_obj(store, models.FieldAttr, attr_dict))
def db_fix_fields_attrs(store):
    """
    Ensures that the current store and the field_attrs.json file correspond.
    The content of the field_attrs dict is used to add and remove all of the
    expected forms of field_attrs for FieldAttrs in the db.
    """
    field_attrs = read_json_file(Settings.field_attrs_file)

    # Fields whose id (not type) selects their attribute set.
    special_lst = ['whistleblower_identity']

    # Standard field types; their attrs are matched on Field.type.
    std_lst = [
        'inputbox', 'textarea', 'multichoice', 'checkbox', 'tos', 'date'
    ]

    for field_type, attrs_dict in field_attrs.items():
        attrs_to_keep_for_type = attrs_dict.keys()
        if field_type in std_lst:
            # Ensure that the standard field attrs do not have extra attr
            # rows; special fields are excluded from this sweep.
            res = store.find(
                models.FieldAttr,
                Not(In(models.FieldAttr.name, attrs_to_keep_for_type)),
                models.FieldAttr.field_id == models.Field.id,
                models.Field.type == field_type,
                Not(In(models.Field.id, special_lst)))
        else:
            # Look for dropped attrs in non-standard field_groups like
            # whistleblower_identity, matched on Field.id.
            res = store.find(
                models.FieldAttr,
                Not(In(models.FieldAttr.name, attrs_to_keep_for_type)),
                models.FieldAttr.field_id == models.Field.id,
                models.Field.id == field_type)

        count = res.count()
        if count:
            log.debug("Removing %d attributes from fields of type %s",
                      count, field_type)
        for r in res:
            store.remove(r)

    # Add keys to the db that have been added to field_attrs
    for field in store.find(models.Field):
        typ = field.type if field.id not in special_lst else field.id
        attrs = field_attrs.get(typ, {})
        for attr_name, attr_dict in attrs.items():
            if not store.find(
                    models.FieldAttr,
                    And(models.FieldAttr.field_id == field.id,
                        models.FieldAttr.name == attr_name)).one():
                log.debug("Adding new field attr %s.%s", typ, attr_name)
                attr_dict['name'] = attr_name
                attr_dict['field_id'] = field.id
                models.db_forge_obj(store, models.FieldAttr, attr_dict)
def get_specification_filters(filter, goalstatus=True):
    """Return a list of Storm expressions for filtering Specifications.

    :param filter: A collection of SpecificationFilter and/or strings.
        Strings are used for text searches.
    :param goalstatus: When true, also translate ACCEPTED/PROPOSED/DECLINED
        filters into a goalstatus clause.
    :return: a list of Storm clauses to be ANDed by the caller; empty list
        means "no restriction".
    """
    clauses = []
    # ALL is the trump card.
    if SpecificationFilter.ALL in filter:
        return clauses
    # Look for informational specs.
    if SpecificationFilter.INFORMATIONAL in filter:
        clauses.append(
            Specification.implementation_status ==
            SpecificationImplementationStatus.INFORMATIONAL)
    # Filter based on completion.  See the implementation of
    # Specification.is_complete() for more details.
    if SpecificationFilter.COMPLETE in filter:
        clauses.append(get_specification_completeness_clause())
    if SpecificationFilter.INCOMPLETE in filter:
        clauses.append(Not(get_specification_completeness_clause()))
    # Filter for goal status.  At most one of the three statuses is applied;
    # ACCEPTED wins over PROPOSED, which wins over DECLINED.
    if goalstatus:
        goalstatus = None
        if SpecificationFilter.ACCEPTED in filter:
            goalstatus = SpecificationGoalStatus.ACCEPTED
        elif SpecificationFilter.PROPOSED in filter:
            goalstatus = SpecificationGoalStatus.PROPOSED
        elif SpecificationFilter.DECLINED in filter:
            goalstatus = SpecificationGoalStatus.DECLINED
        if goalstatus:
            clauses.append(Specification.goalstatus == goalstatus)
    if SpecificationFilter.STARTED in filter:
        clauses.append(get_specification_started_clause())
    # Filter for validity. If we want valid specs only, then we should exclude
    # all OBSOLETE or SUPERSEDED specs.
    if SpecificationFilter.VALID in filter:
        clauses.append(
            Not(
                Specification.definition_status.is_in([
                    SpecificationDefinitionStatus.OBSOLETE,
                    SpecificationDefinitionStatus.SUPERSEDED
                ])))
    # Filter for specification text.
    for constraint in filter:
        if isinstance(constraint, basestring):
            # A string in the filter is a text search filter.
            clauses.append(fti_search(Specification, constraint))
    return clauses
def deleteMultiple(cls, ids):
    """Delete the BugSubscriptionFilters with the given ids, their
    dependent rows, and any structural subscriptions left with no
    filters.
    """
    from lp.bugs.model.structuralsubscription import StructuralSubscription
    store = IStore(BugSubscriptionFilter)
    # Record the parent subscriptions before the filters are deleted,
    # so we can garbage-collect empty ones afterwards.
    structsub_ids = list(
        store.find(BugSubscriptionFilter.structural_subscription_id,
                   BugSubscriptionFilter.id.is_in(ids)))
    # Delete child rows first to satisfy foreign-key constraints.
    kinds = [
        BugSubscriptionFilterImportance, BugSubscriptionFilterStatus,
        BugSubscriptionFilterTag, BugSubscriptionFilterInformationType
    ]
    for kind in kinds:
        store.find(kind, kind.filter_id.is_in(ids)).remove()
    store.find(BugSubscriptionFilter,
               BugSubscriptionFilter.id.is_in(ids)).remove()
    # Now delete any structural subscriptions that have no filters.
    # Take out a SHARE lock on the filters that we use as evidence
    # for keeping structsubs, to ensure that they haven't been
    # deleted under us.
    filter_expr = Select(
        1,
        tables=[BugSubscriptionFilter],
        where=(BugSubscriptionFilter.structural_subscription_id ==
               StructuralSubscription.id))
    # Storm has no FOR SHARE support, so append it to the rendered SQL.
    locked_filter_expr = SQL(
        convert_storm_clause_to_string(filter_expr) + ' FOR SHARE')
    store.find(StructuralSubscription,
               StructuralSubscription.id.is_in(structsub_ids),
               Not(Exists(locked_filter_expr))).remove()
def failed_activity(self):
    """Return this watch's BugWatchActivity rows whose result is not a
    success status, most recent first."""
    unsuccessful = Not(
        BugWatchActivity.result.is_in(BUG_WATCH_ACTIVITY_SUCCESS_STATUSES))
    activity = Store.of(self).find(
        BugWatchActivity,
        BugWatchActivity.bug_watch == self,
        unsuccessful)
    return activity.order_by(Desc('activity_date'))
def db_update_fieldattrs(store, field_id, field_attrs, language):
    """Upsert every attribute in field_attrs for the given field, then
    delete any FieldAttr row of that field that was not part of the
    update (all of them when field_attrs is empty)."""
    updated_ids = []
    for attr_name, attr in field_attrs.iteritems():
        updated_ids.append(
            db_update_fieldattr(store, field_id, attr_name, attr, language))

    conditions = [models.FieldAttr.field_id == field_id]
    if updated_ids:
        # Keep only the rows we just touched; skip the In() clause when
        # the id list is empty.
        conditions.append(Not(In(models.FieldAttr.id, updated_ids)))
    store.find(models.FieldAttr, And(*conditions)).remove()
def _parse_string_state(self, state, table_field):
    """Translate a StringQueryState into a Storm clause on table_field.

    Matching is accent- and case-insensitive via StoqNormalizeString.
    Returns None when the search text is blank.
    """
    if not state.text.strip():
        return

    def _normalized_like(value):
        return Like(StoqNormalizeString(table_field),
                    StoqNormalizeString(u'%%%s%%' % value.lower()),
                    case_sensitive=False)

    mode = state.mode
    if mode == StringQueryState.CONTAINS_ALL:
        # Every whitespace-separated word must match somewhere.
        words = [w for w in re.split('[ \n\r]', state.text) if w]
        return And(*[_normalized_like(w) for w in words])
    if mode == StringQueryState.IDENTICAL_TO:
        return Lower(table_field) == state.text.lower()
    if mode == StringQueryState.CONTAINS_EXACTLY:
        return _normalized_like(state.text.lower())
    if mode == StringQueryState.NOT_CONTAINS:
        # None of the space-separated words may match.
        words = [w for w in state.text.split(' ') if w]
        return And(*[Not(_normalized_like(w)) for w in words])
    raise AssertionError  # pragma nocoverage
def db_fix_fields_attrs(store):
    """Reconcile FieldAttr rows in the db with the field_attrs.json file:
    drop attrs no longer described there and add newly described ones.
    """
    # Load the field attributes descriptors.
    # NOTE: file() is Python 2 only.
    field_attrs = {}
    with file(GLSettings.field_attrs_file, 'r') as f:
        json_string = f.read()
        field_attrs = json.loads(json_string)

    # Get the flat list of currently-described attr names, stamping each
    # descriptor with its own name along the way.
    uptodate_keys = []
    for k1 in field_attrs.keys():
        for k2 in field_attrs[k1].keys():
            field_attrs[k1][k2]['name'] = k2
            uptodate_keys.append(k2)

    # Remove attrs whose names have disappeared from the descriptors.
    # NOTE(review): this matches by name only, across all field types.
    store.find(models.FieldAttr,
               Not(In(models.FieldAttr.name, uptodate_keys))).remove()

    # Add attrs that have been introduced in the descriptors but are not
    # yet present on a field of the matching type.
    for field in store.find(models.Field):
        attrs = field_attrs.get(field.type, {})
        for attr_name in attrs.keys():
            if not store.find(
                    models.FieldAttr,
                    And(models.FieldAttr.field_id == field.id,
                        models.FieldAttr.name == attr_name)).one():
                field.attrs.add(
                    models.db_forge_obj(store, models.FieldAttr,
                                        attrs[attr_name]))
def _parse_string_state(self, state, table_field):
    """Build a case-insensitive LIKE (or NOT LIKE) clause for the given
    query state; return None when there is no search text."""
    if not state.text:
        return
    pattern = u'%%%s%%' % state.text.lower()
    clause = Like(table_field, pattern, case_sensitive=False)
    if state.mode == StringQueryState.NOT_CONTAINS:
        return Not(clause)
    return clause
def _userCanAccessSubmissionStormClause(user):
    """Limit results of HWSubmission queries to rows the user can access.

    :param user: the requesting person, or None for anonymous access.
    :return: a Storm clause, or True when no restriction applies.
    """
    submission_is_public = Not(HWSubmission.private)
    admins = getUtility(ILaunchpadCelebrities).admin
    janitor = getUtility(ILaunchpadCelebrities).janitor
    if user is None:
        # Anonymous users see only public submissions.
        return submission_is_public
    elif user.inTeam(admins) or user == janitor:
        # Admins and the janitor may see everything.
        return True
    else:
        # A private submission is accessible when the user participates
        # in the owning team (direct ownership is covered by a person's
        # self-participation row).
        subselect = Select(
            TeamParticipation.teamID,
            And(HWSubmission.ownerID == TeamParticipation.teamID,
                TeamParticipation.personID == user.id,
                HWSubmission.private))
        has_access = HWSubmission.ownerID.is_in(subselect)
        # Reuse the clause built above rather than constructing a second,
        # duplicate Not(HWSubmission.private) expression.
        return Or(submission_is_public, has_access)
def get_specification_started_clause():
    """Return a Storm clause matching specifications whose implementation
    has started: either the status is past the not-started ones, or an
    informational spec has an approved definition."""
    past_not_started = Not(
        Specification.implementation_status.is_in([
            SpecificationImplementationStatus.UNKNOWN,
            SpecificationImplementationStatus.NOTSTARTED,
            SpecificationImplementationStatus.DEFERRED,
            SpecificationImplementationStatus.INFORMATIONAL
        ]))
    informational_approved = And(
        Specification.implementation_status ==
        SpecificationImplementationStatus.INFORMATIONAL,
        Specification.definition_status ==
        SpecificationDefinitionStatus.APPROVED)
    return Or(past_not_started, informational_approved)
def db_update_fieldattrs(store, field_id, field_attrs, language):
    """Upsert each attribute of field_attrs on the given field, then
    delete every FieldAttr row of the field that was not updated."""
    updated_ids = []
    for attr_name, attr in field_attrs.iteritems():
        attr['name'] = attr_name
        # Localized attributes carry per-language values that must be
        # filled in before the upsert.
        if attr['type'] == u'localized':
            fill_localized_keys(attr, ['value'], language)
        updated_ids.append(db_update_fieldattr(store, field_id, attr))

    stale = And(models.FieldAttr.field_id == field_id,
                Not(In(models.FieldAttr.id, updated_ids)))
    store.find(models.FieldAttr, stale).remove()
def _get_status_query(self, state):
    """Translate the status-filter state into a Storm clause on
    TransferOrder, relative to the current branch."""
    branch = api.get_current_branch(self.store)
    if state.value == 'pending':
        # Sent towards this branch but not yet received here.
        return And(TransferOrder.status == TransferOrder.STATUS_SENT,
                   TransferOrder.destination_branch_id == branch.id)
    if state.value == 'received':
        return And(TransferOrder.status == TransferOrder.STATUS_RECEIVED,
                   TransferOrder.destination_branch_id == branch.id)
    if state.value == 'sent':
        # Anything dispatched from here that was not cancelled.
        return And(TransferOrder.source_branch_id == branch.id,
                   Not(TransferOrder.status ==
                       TransferOrder.STATUS_CANCELLED))
    if state.value == 'cancelled':
        return And(TransferOrder.status == TransferOrder.STATUS_CANCELLED,
                   TransferOrder.source_branch_id == branch.id)
    # No specific status: any transfer touching this branch, either way.
    return Or(TransferOrder.source_branch_id == branch.id,
              TransferOrder.destination_branch_id == branch.id)
def db_update_fieldoptions(store, field, options, language):
    """Upsert the given options on `field`, then delete every other
    FieldOption row still attached to it.

    :param store: the store on which to perform queries.
    :param field: the field to which the options are bound.
    :param options: list of option definition dicts.
    :param language: the language of the option definitions.
    """
    kept_ids = []
    for option in options:
        kept_ids.append(
            db_update_fieldoption(store, field, option['id'], option,
                                  language))

    obsolete = And(models.FieldOption.field_id == field.id,
                   Not(In(models.FieldOption.id, kept_ids)))
    store.find(models.FieldOption, obsolete).remove()
def getExpiringProducts(cls):
    """See `ExpirationSourceMixin`."""
    earliest_date, latest_date, past_date = cls._get_expiration_dates()
    # Products for which a job of this type was created recently must
    # not be picked up again.
    recently_jobbed = Select(
        ProductJob.product_id,
        tables=[ProductJob, Job],
        where=And(ProductJob.job_type == cls.class_job_type,
                  ProductJob.job_id == Job.id,
                  Job.date_created > past_date))
    # Commercial subscriptions expiring inside the notification window.
    expiring_window = And(
        CommercialSubscription.productID == Product.id,
        CommercialSubscription.date_expires >= earliest_date,
        CommercialSubscription.date_expires < latest_date)
    return IStore(Product).find(
        Product,
        Product.active == True,
        expiring_window,
        Not(Product.id.is_in(recently_jobbed)))
def db_update_fieldoptions(store, field_id, options, language):
    """Upsert the given options for field_id, then delete any other
    FieldOption row of that field (all of them when options is empty).

    :param store: the store on which to perform queries.
    :param field_id: the field id to which the options are bound.
    :param options: list of option definition dicts.
    :param language: the language of the option definitions.
    """
    kept_ids = []
    for option in options:
        option['field_id'] = field_id
        kept_ids.append(
            db_update_fieldoption(store, unicode(option['id']), option,
                                  language))

    conditions = [models.FieldOption.field_id == field_id]
    if kept_ids:
        # Skip the In() clause when there are no surviving ids.
        conditions.append(Not(In(models.FieldOption.id, kept_ids)))
    store.find(models.FieldOption, And(*conditions)).remove()
def db_update_steps(store, context_id, steps, language):
    """Upsert the given steps for a context, then delete any Step row of
    the context that was not part of the update.

    :param store: the store on which to perform queries.
    :param context_id: the context to which the steps belong.
    :param steps: the step definition dicts to be updated.
    :param language: the language of the specified steps.
    """
    kept_ids = []
    for step in steps:
        step['context_id'] = context_id
        kept_ids.append(db_update_step(store, step['id'], step, language))

    obsolete = And(models.Step.context_id == context_id,
                   Not(In(models.Step.id, kept_ids)))
    store.find(models.Step, obsolete).remove()
def get_total_confirmed_value(self):
    """Returns the sum of all confirmed payments values

    This will consider all payments ignoring cancelled and preview ones,
    that is, if a payment is confirmed/reviewing/paid it will be summed.

    If you want to consider the preview ones too, use
    :meth:`.get_total_value` instead

    :returns: the total confirmed payments value
    """
    ignored_statuses = [Payment.STATUS_CANCELLED, Payment.STATUS_PREVIEW]
    confirmed_payments = self.store.find(
        Payment,
        And(Payment.group_id == self.id,
            Not(In(Payment.status, ignored_statuses))))
    return self._get_payments_sum(confirmed_payments, Payment.value)
def findShared(self): """Provide tuples of (other, this) items for each shared POTMsgSet. Only return those that are shared but shouldn't be because they are now in non-sharing templates. """ # XXX wgrant 2014-08-27: This has always been pretty broken. # If a sharing subset has been split, templates in the other # subset will end up being split from each other as well! sharing_subset = getUtility(IPOTemplateSet).getSharingSubset( product=self.potemplate.product, distribution=self.potemplate.distribution, sourcepackagename=self.potemplate.sourcepackagename) sharing_ids = list( sharing_subset.getSharingPOTemplateIDs(self.potemplate.name)) ThisItem = ClassAlias(TranslationTemplateItem, 'ThisItem') OtherItem = ClassAlias(TranslationTemplateItem, 'OtherItem') return Store.of(self.potemplate).find( (OtherItem, ThisItem), ThisItem.potemplateID == self.potemplate.id, OtherItem.potmsgsetID == ThisItem.potmsgsetID, Not(OtherItem.potemplateID.is_in(sharing_ids)), )
def run(self):
    """See `IRemoveArtifactSubscriptionsJob`.

    Builds three parallel filter lists (bugs, branches, specifications),
    then for each populated list removes the subscriptions of people who
    no longer have visibility on the artifact.
    """
    logger = logging.getLogger()
    logger.info(self.getOperationDescription())
    bug_filters = []
    branch_filters = []
    specification_filters = []

    # Explicit artifact ids, when given, narrow each pipeline directly.
    if self.branch_ids:
        branch_filters.append(Branch.id.is_in(self.branch_ids))
    if self.specification_ids:
        specification_filters.append(
            Specification.id.is_in(self.specification_ids))
    if self.bug_ids:
        bug_filters.append(BugTaskFlat.bug_id.is_in(self.bug_ids))
    else:
        # Without explicit bug ids, narrow by information type and/or
        # pillar (product / distribution) across all three pipelines.
        if self.information_types:
            bug_filters.append(
                BugTaskFlat.information_type.is_in(self.information_types))
            branch_filters.append(
                Branch.information_type.is_in(self.information_types))
            specification_filters.append(
                Specification.information_type.is_in(
                    self.information_types))
        if self.product:
            bug_filters.append(BugTaskFlat.product == self.product)
            branch_filters.append(Branch.product == self.product)
            specification_filters.append(
                Specification.product == self.product)
        if self.distro:
            bug_filters.append(BugTaskFlat.distribution == self.distro)
            branch_filters.append(Branch.distribution == self.distro)
            specification_filters.append(
                Specification.distribution == self.distro)
    if self.grantee:
        # Restrict to subscribers in the grantee's participation tree,
        # so team memberships are covered as well as the person itself.
        bug_filters.append(
            In(
                BugSubscription.person_id,
                Select(TeamParticipation.personID,
                       where=TeamParticipation.team == self.grantee)))
        branch_filters.append(
            In(
                BranchSubscription.personID,
                Select(TeamParticipation.personID,
                       where=TeamParticipation.team == self.grantee)))
        specification_filters.append(
            In(
                SpecificationSubscription.personID,
                Select(TeamParticipation.personID,
                       where=TeamParticipation.team == self.grantee)))

    if bug_filters:
        # Only unsubscribe people who fail the privacy filter, i.e. who
        # can no longer see the bug.
        bug_filters.append(
            Not(
                Or(*get_bug_privacy_filter_terms(
                    BugSubscription.person_id))))
        bug_subscriptions = IStore(BugSubscription).using(
            BugSubscription,
            Join(BugTaskFlat,
                 BugTaskFlat.bug_id == BugSubscription.bug_id)).find(
                     BugSubscription, *bug_filters).config(distinct=True)
        for sub in bug_subscriptions:
            sub.bug.unsubscribe(sub.person, self.requestor,
                                ignore_permissions=True)
    if branch_filters:
        # Same pattern for branches: keep only inaccessible subscribers.
        branch_filters.append(
            Not(Or(
                *get_branch_privacy_filter(BranchSubscription.personID))))
        branch_subscriptions = IStore(BranchSubscription).using(
            BranchSubscription,
            Join(Branch, Branch.id == BranchSubscription.branchID)).find(
                BranchSubscription, *branch_filters).config(distinct=True)
        for sub in branch_subscriptions:
            sub.branch.unsubscribe(sub.person, self.requestor,
                                   ignore_permissions=True)
    if specification_filters:
        # And for specifications.
        specification_filters.append(
            Not(*get_specification_privacy_filter(
                SpecificationSubscription.personID)))
        tables = (SpecificationSubscription,
                  Join(
                      Specification,
                      Specification.id ==
                      SpecificationSubscription.specificationID))
        specifications_subscriptions = IStore(
            SpecificationSubscription).using(*tables).find(
                SpecificationSubscription,
                *specification_filters).config(distinct=True)
        for sub in specifications_subscriptions:
            sub.specification.unsubscribe(sub.person, self.requestor,
                                          ignore_permissions=True)
def __ne__(self, other):
    """Build the inequality expression by negating the one `==` makes."""
    equality = (self == other)
    return Not(equality)
def del_cfg_not_in_groups(store):
    """Delete every Config row whose var_group is not one of the
    recognized groups (node, notification, private)."""
    known_groups = [u'node', u'notification', u'private']
    stray = store.find(Config, Not(In(Config.var_group, known_groups)))
    stray.remove()
def _get_structural_subscription_filter_id_query(
        bug, bugtasks, level, direct_subscribers):
    """Helper function.

    This provides the core implementation for get_structural_subscribers.

    :param bug: a bug.
    :param bugtasks: an iterable of one or more bugtasks of the bug.
    :param level: a notification level.
    :param direct_subscribers: a collection of Person objects who are
        directly subscribed to the bug.
    """
    # Circular. :-(
    from lp.bugs.model.bugtasksearch import get_bug_bulk_privacy_filter_terms
    # We get the ids because we need to use group by in order to
    # look at the filters' tags in aggregate.  Once we have the ids,
    # we can get the full set of what we need in subsuming or
    # subsequent SQL calls.
    # (Aggregate hacks to fetch more columns here would work but Storm
    # would not like them; Postgres also will not return non-grouped
    # columns even when the group-by key is a primary key.)
    # See the docstring of get_structural_subscription_targets.
    query_arguments = list(
        get_structural_subscription_targets(bugtasks))
    if not query_arguments:
        # We have no bugtasks.
        # NOTE(review): this early exit returns a 2-tuple while the
        # "no candidates" exit below returns a bare None -- the return
        # arity is inconsistent; confirm what the caller unpacks.
        return None, None
    # With large numbers of filters in the system, it's fastest in our
    # tests if we get a set of structural subscriptions pertinent to the
    # given targets, and then work with that.  It also shares work when
    # a union is needed later.
    # We will exclude people who have a direct subscription to the bug.
    filters = []
    if direct_subscribers is not None:
        if direct_subscribers:
            # The caller told us who is directly subscribed.
            filters.append(
                Not(In(StructuralSubscription.subscriberID,
                       tuple(person.id
                             for person in direct_subscribers))))
    else:
        # No collection given: exclude direct subscribers via subselect.
        filters.append(
            Not(In(StructuralSubscription.subscriberID,
                   Select(BugSubscription.person_id,
                          BugSubscription.bug == bug))))
    if bug.private:
        filters.append(
            get_bug_bulk_privacy_filter_terms(
                StructuralSubscription.subscriberID, bug))
    candidates = list(_get_structural_subscriptions(
        StructuralSubscription.id, query_arguments, *filters))
    if not candidates:
        # If there are no structural subscriptions for these targets,
        # then we don't need to look at the importance, status, and
        # tags.  We're done.
        return None
    # The "conditions" list will eventually be passed to a Storm
    # "And" function, and then become the WHERE clause of our SELECT.
    conditions = [In(StructuralSubscription.id, candidates)]
    # Handling notification level is trivial, so we include that first.
    if level is not None:
        conditions.append(
            BugSubscriptionFilter.bug_notification_level >= level)
    # This handles the bugtask-specific attributes of status and
    # importance.
    conditions.append(_calculate_bugtask_condition(query_arguments))
    # Handle filtering by information type.
    conditions.append(Or(
        BugSubscriptionFilterInformationType.information_type ==
        bug.information_type,
        BugSubscriptionFilterInformationType.information_type == None))
    # Now we handle tags.  This actually assembles the query, because it
    # may have to union two queries together.
    # Note that casting bug.tags to a list subtly removes the security
    # proxy on the list.  Strings are never security-proxied, so we
    # don't have to worry about them.
    return _calculate_tag_query(conditions, list(bug.tags))
def watches_ready_to_check(self):
    """Return this tracker's watches whose scheduled next_check time has
    arrived (watches with no next_check are excluded)."""
    now = datetime.now(timezone('UTC'))
    return Store.of(self).find(
        BugWatch,
        BugWatch.bugtracker == self,
        Not(BugWatch.next_check == None),
        BugWatch.next_check <= now)
def _calculate_tag_query(conditions, tags):
    """Determine tag-related conditions and assemble a query.

    :param conditions: the other conditions that constrain the query.
    :param tags: the list of tags that the bug has.
    :return: a Storm Select/Union of matching BugSubscriptionFilter ids,
        or None when no candidate filter survives the base conditions.
    """
    # These are tables and joins we will want.  We leave out the tag join
    # because that needs to be added conditionally.
    tables = [
        StructuralSubscription,
        Join(BugSubscriptionFilter,
             BugSubscriptionFilter.structural_subscription_id ==
             StructuralSubscription.id),
        LeftJoin(BugSubscriptionFilterStatus,
                 BugSubscriptionFilterStatus.filter_id ==
                 BugSubscriptionFilter.id),
        LeftJoin(BugSubscriptionFilterImportance,
                 BugSubscriptionFilterImportance.filter_id ==
                 BugSubscriptionFilter.id),
        LeftJoin(BugSubscriptionFilterInformationType,
                 BugSubscriptionFilterInformationType.filter_id ==
                 BugSubscriptionFilter.id)]
    tag_join = LeftJoin(
        BugSubscriptionFilterTag,
        BugSubscriptionFilterTag.filter_id == BugSubscriptionFilter.id)
    # If the bug has no tags, this is relatively easy. Otherwise, not so
    # much.
    if len(tags) == 0:
        # The bug has no tags.  We should leave out filters that
        # require any generic non-empty set of tags
        # (BugSubscriptionFilter.include_any_tags), which we do with
        # the conditions.
        conditions.append(Not(BugSubscriptionFilter.include_any_tags))
        tables.append(tag_join)
        return Select(
            BugSubscriptionFilter.id,
            tables=tables,
            where=And(*conditions),
            # We have to make sure that the filter does not require
            # any *specific* tags.  We do that with a GROUP BY on the
            # filters, and then a HAVING clause that aggregates the
            # BugSubscriptionFilterTags that are set to "include" the
            # tag.  (If it is not an include, that is an exclude, and a
            # bug without tags will not have a particular tag, so we can
            # ignore those in this case.)  This requires a CASE
            # statement within the COUNT.
            group_by=(BugSubscriptionFilter.id,),
            having=Count(
                SQL('CASE WHEN BugSubscriptionFilterTag.include '
                    'THEN BugSubscriptionFilterTag.tag END')) == 0)
    else:
        # The bug has some tags.  This will require a bit of fancy
        # footwork.  First, though, we will simply want to leave out
        # filters that should only match bugs without tags.
        conditions.append(Not(BugSubscriptionFilter.exclude_any_tags))
        # We're going to have to do a union with another query.  One
        # query will handle filters that are marked to include *any*
        # of the filter's selected tags, and the other query will
        # handle filters that include *all* of the filter's selected
        # tags (as determined by BugSubscriptionFilter.find_all_tags).
        # Every aspect of the unioned queries' WHERE clauses *other
        # than tags* will need to be the same, and so we perform that
        # separately, first.  When Storm supports the WITH statement
        # (bug 729134), we can consider folding this back into a single
        # query.
        candidates = list(
            IStore(BugSubscriptionFilter).using(*tables).find(
                BugSubscriptionFilter.id, *conditions))
        if not candidates:
            return None
        # As mentioned, in this first SELECT we handle filters that
        # match any of the filter's tags.  This can be a relatively
        # straightforward query--we just need a bit more added to
        # our WHERE clause, and we don't need a GROUP BY/HAVING.
        first_select = Select(
            BugSubscriptionFilter.id,
            tables=[BugSubscriptionFilter, tag_join],
            where=And(
                Or(
                    # We want filters that proclaim they simply want any
                    # tags.
                    BugSubscriptionFilter.include_any_tags,
                    # Also include filters that match any tag...
                    And(Not(BugSubscriptionFilter.find_all_tags),
                        Or(
                            # ...with a positive match...
                            And(BugSubscriptionFilterTag.include,
                                In(BugSubscriptionFilterTag.tag, tags)),
                            # ...or with a negative match...
                            And(Not(BugSubscriptionFilterTag.include),
                                Not(In(BugSubscriptionFilterTag.tag,
                                       tags))),
                            # ...or if the filter does not specify any
                            # tags.
                            BugSubscriptionFilterTag.tag == None))),
                In(BugSubscriptionFilter.id, candidates)))
        # We have our first clause.  Now we start on the second one:
        # handling filters that match *all* tags.
        # This second query will have a HAVING clause, which is where
        # some tricky bits happen.  We first make a SQL snippet that
        # represents the tags on this bug.  It is straightforward
        # except for one subtle hack: the addition of the empty
        # space in the array.  This is because we are going to be
        # aggregating the tags on the filters using ARRAY_AGG, which
        # includes NULLs (unlike most other aggregators).  SQL treats
        # NULLs as unknowns that can never be matched, so
        # ARRAY['foo','bar',NULL] does not contain ARRAY['foo',NULL].
        # Therefore, so we can build the HAVING clause without defining
        # a custom Postgres aggregator, we use a single space as,
        # effectively, NULL.  This is safe because a single space is
        # not an acceptable tag.
        tags_array = "ARRAY[%s,' ']::TEXT[]" % ",".join(
            quote(tag) for tag in tags)
        # Now let's build the select itself.
        second_select = Select(
            BugSubscriptionFilter.id,
            tables=[BugSubscriptionFilter, tag_join],
            # Our WHERE clause is straightforward.  We are simply
            # focusing on BugSubscriptionFilter.find_all_tags, when the
            # first SELECT did not consider it.
            where=And(BugSubscriptionFilter.find_all_tags,
                      In(BugSubscriptionFilter.id, candidates)),
            # The GROUP BY collects the filters together.
            group_by=(BugSubscriptionFilter.id,),
            having=And(
                # The list of tags should be a superset of the filter
                # tags to be included.
                ArrayContains(
                    SQL(tags_array),
                    # This next line gives us an array of the tags that
                    # the filter wants to include.  Notice that it
                    # includes the empty string when the condition does
                    # not match, per the discussion above.
                    ArrayAgg(
                        SQL("CASE WHEN BugSubscriptionFilterTag.include "
                            "THEN BugSubscriptionFilterTag.tag "
                            "ELSE ' '::TEXT END"))),
                # The list of tags should also not intersect with the
                # tags that the filter wants to exclude.
                Not(
                    ArrayIntersects(
                        SQL(tags_array),
                        # This next line gives us an array of the tags
                        # that the filter wants to exclude.  We do not
                        # bother with the empty string, and therefore
                        # allow NULLs into the array, because in this
                        # case we are determining whether the sets
                        # intersect, not if the first set subsumes the
                        # second.
                        ArrayAgg(
                            SQL('CASE WHEN '
                                'NOT BugSubscriptionFilterTag.include '
                                'THEN BugSubscriptionFilterTag.tag '
                                'END'))))))
        # Everything is ready.  Return the union.
        return Union(first_select, second_select)