def get_users(self, **options):
    """
    Returns users matching the given options.

    Supported options: `statuses` and `types`, each an optional list used
    to narrow the query.  Built-in accounts are never returned.

    @return: [DynamicObject<user info...>]
    """
    criteria = []

    statuses = options.get('statuses')
    if statuses:
        criteria.append(In(UserEntity.user_status, statuses))

    types = options.get('types')
    if types:
        criteria.append(In(UserEntity.user_type, types))

    store = get_current_transaction_store()
    found = store.find(UserEntity, And(*criteria))

    # Reserved system accounts are excluded from every listing.
    reserved = ('admin', 'root', 'support')
    return [DynamicObject(entity_to_dic(user))
            for user in found
            if user.user_id not in reserved]
def getBugCvesForBugTasks(self, bugtasks, cve_mapper=None):
    """See ICveSet."""
    bugs = load_related(Bug, bugtasks, ('bugID', ))
    if not bugs:
        return []
    bug_ids = [b.id for b in bugs]
    # Avoid BugCve instances: Storm can take a very long time resolving
    # the Bug and CVE referenced by each BugCve when the +cve view of a
    # distroseries renders.  There may be thousands of (bug, CVE) tuples
    # while the distinct bugs and CVEs number only in the hundreds, so it
    # is far cheaper to fetch bare (bug_id, cve_id) pairs here and map
    # them onto the bulk-loaded Bug and CVE objects ourselves.
    store = Store.of(bugtasks[0])
    pair_result = store.find(
        (BugCve.bugID, BugCve.cveID), In(BugCve.bugID, bug_ids))
    pair_result.order_by(BugCve.bugID, BugCve.cveID)
    pairs = list(pair_result)
    cve_ids = set(cve_id for _, cve_id in pairs)
    cves = store.find(Cve, In(Cve.id, list(cve_ids)))
    if cve_mapper is None:
        cvemap = dict((cve.id, cve) for cve in cves)
    else:
        cvemap = dict((cve.id, cve_mapper(cve)) for cve in cves)
    bugmap = dict((bug.id, bug) for bug in bugs)
    return [(bugmap[bug_id], cvemap[cve_id]) for bug_id, cve_id in pairs]
def enable_disable_languages(store, request):
    """Reconcile the enabled-languages set with the admin request."""
    enabled_now = EnabledLanguage.list(store)
    wanted = [unicode(c) for c in request['languages_enabled']]

    if not wanted:
        raise errors.InvalidInputFormat("No languages enabled!")

    # The default language must remain one of the enabled ones.
    if request['default_language'] not in wanted:
        raise errors.InvalidInputFormat(
            "Invalid lang code for chosen default_language")

    appdata = None
    for code in wanted:
        if code not in LANGUAGES_SUPPORTED_CODES:
            raise errors.InvalidInputFormat("Invalid lang code: %s" % code)

        if code not in enabled_now:
            # Application data is loaded lazily, at most once.
            if appdata is None:
                appdata = load_appdata()
            log.debug("Adding a new lang %s" % code)
            EnabledLanguage.add_new_lang(store, code, appdata)

    dropped = list(set(enabled_now) - set(wanted))
    if dropped:
        # Users left on a disabled language fall back to the default.
        store.find(models.User,
                   In(models.User.language, dropped)).set(
                       language=request['default_language'])
        models.db_delete(store, models.l10n.EnabledLanguage,
                         In(models.l10n.EnabledLanguage.name, dropped))
def db_fix_fields_attrs(store):
    '''
    Ensures that the current store and the field_attrs.json file correspond.
    The content of the field_attrs dict is used to add and remove all of the
    excepted forms of field_attrs for FieldAttrs in the db.
    '''
    # Load the field attributes descriptors.
    # NOTE: use the open() builtin rather than the deprecated Python 2
    # `file` type alias (removed in Python 3).
    field_attrs = {}
    with open(GLSettings.field_attrs_file, 'r') as f:
        json_string = f.read()
        field_attrs = json.loads(json_string)

    std_lst = ['inputbox', 'textarea', 'multichoice', 'checkbox', 'tos', 'date']

    for field_type, attrs_dict in field_attrs.iteritems():
        attrs_to_keep_for_type = attrs_dict.keys()
        if field_type in std_lst:
            # Ensure that the standard field attrs do not have extra attr rows
            res = store.find(
                models.FieldAttr,
                Not(In(models.FieldAttr.name, attrs_to_keep_for_type)),
                models.FieldAttr.field_id == models.Field.id,
                models.Field.type == field_type,
                models.Field.key == unicode(''))
        else:
            # Look for dropped attrs in non-standard field_groups like whistleblower_identity
            res = store.find(
                models.FieldAttr,
                Not(In(models.FieldAttr.name, attrs_to_keep_for_type)),
                models.FieldAttr.field_id == models.Field.id,
                models.Field.key == field_type)

        count = res.count()
        if count:
            log.debug("Removing %d attributes from fields of type %s" %
                      (count, field_type))
            for r in res:
                store.remove(r)

    # Add keys to the db that have been added to field_attrs
    for field in store.find(models.Field):
        # Non-standard fields are keyed by Field.key instead of Field.type.
        typ = field.type if field.key == '' else field.key
        attrs = field_attrs.get(typ, {})
        for attr_name, attr_dict in attrs.iteritems():
            if not store.find(
                    models.FieldAttr,
                    And(models.FieldAttr.field_id == field.id,
                        models.FieldAttr.name == attr_name)).one():
                log.debug("Adding new field attr %s.%s" % (typ, attr_name))
                attr_dict['name'] = attr_name
                field.attrs.add(
                    models.db_forge_obj(store, models.FieldAttr, attr_dict))
def db_fix_fields_attrs(store):
    """
    Ensures that the current store and the field_attrs.json file correspond.
    The content of the field_attrs dict is used to add and remove all of the
    excepted forms of field_attrs for FieldAttrs in the db.
    """
    field_attrs = read_json_file(Settings.field_attrs_file)

    # Fields whose attrs are keyed by Field.id rather than Field.type.
    # NOTE(review): this implies special fields use the literal string
    # 'whistleblower_identity' as their Field.id — confirm against the schema.
    special_lst = ['whistleblower_identity']

    std_lst = ['inputbox', 'textarea', 'multichoice', 'checkbox', 'tos', 'date']

    for field_type, attrs_dict in field_attrs.items():
        # Attribute names the JSON descriptor declares for this type.
        attrs_to_keep_for_type = attrs_dict.keys()
        if field_type in std_lst:
            # Ensure that the standard field attrs do not have extra attr rows
            res = store.find(
                models.FieldAttr,
                Not(In(models.FieldAttr.name, attrs_to_keep_for_type)),
                models.FieldAttr.field_id == models.Field.id,
                models.Field.type == field_type,
                Not(In(models.Field.id, special_lst)))
        else:
            # Look for dropped attrs in non-standard field_groups like whistleblower_identity
            res = store.find(
                models.FieldAttr,
                Not(In(models.FieldAttr.name, attrs_to_keep_for_type)),
                models.FieldAttr.field_id == models.Field.id,
                models.Field.id == field_type)

        count = res.count()
        if count:
            log.debug("Removing %d attributes from fields of type %s",
                      count, field_type)
            for r in res:
                store.remove(r)

    # Add keys to the db that have been added to field_attrs
    for field in store.find(models.Field):
        # Special fields look up their descriptor by id, others by type.
        typ = field.type if field.id not in special_lst else field.id
        attrs = field_attrs.get(typ, {})
        for attr_name, attr_dict in attrs.items():
            if not store.find(
                    models.FieldAttr,
                    And(models.FieldAttr.field_id == field.id,
                        models.FieldAttr.name == attr_name)).one():
                log.debug("Adding new field attr %s.%s", typ, attr_name)
                # db_forge_obj needs name/field_id folded into the dict.
                attr_dict['name'] = attr_name
                attr_dict['field_id'] = field.id
                models.db_forge_obj(store, models.FieldAttr, attr_dict)
def db_prepare_fields_serialization(store, fields):
    # Bulk-load, level by level, every descendant field plus its attrs,
    # options and triggers, so serialization needs no per-field queries.
    # The result maps field id -> list of related rows for each category.
    ret = {'fields': {}, 'attrs': {}, 'options': {}, 'triggers': {}}

    # Seed the worklist with the given fields and their templates.
    fields_ids = [f.id for f in fields]
    for f in fields:
        if f.template_id is not None:
            fields_ids.append(f.template_id)

    # Pre-create empty buckets for every seed id.
    for f in fields_ids:
        ret['fields'][f] = []
        ret['attrs'][f] = []
        ret['options'][f] = []
        ret['triggers'][f] = []

    # Breadth-first walk: each pass fetches the children of the current
    # frontier, then the frontier is rebuilt from `tmp`.
    while len(fields_ids):
        fs = store.find(models.Field,
                        In(models.Field.fieldgroup_id, fields_ids))

        tmp = []
        for f in fs:
            ret['fields'][f.fieldgroup_id].append(f)
            tmp.append(f.id)
            if f.template_id is not None:
                # NOTE(review): this append is wiped by `del fields_ids[:]`
                # below; the template id survives only via `tmp`.
                fields_ids.append(f.template_id)
                tmp.append(f.template_id)

        del fields_ids[:]

        for f in tmp:
            ret['fields'][f] = []
            ret['attrs'][f] = []
            ret['options'][f] = []
            ret['triggers'][f] = []
            fields_ids.append(f)

    objs = store.find(models.FieldAttr,
                      In(models.FieldAttr.field_id, ret['fields'].keys()))
    for obj in objs:
        ret['attrs'][obj.field_id].append(obj)

    objs = store.find(models.FieldOption,
                      In(models.FieldOption.field_id, ret['fields'].keys()))
    for obj in objs:
        ret['options'][obj.field_id].append(obj)

    objs = store.find(
        models.FieldOption,
        In(models.FieldOption.trigger_field, ret['fields'].keys()))
    for obj in objs:
        # NOTE(review): rows are selected by trigger_field but bucketed by
        # field_id; if field_id is outside ret['fields'] this raises
        # KeyError — confirm whether trigger_field was intended here.
        ret['triggers'][obj.field_id].append(obj)

    return ret
def db_prepare_fields_serialization(store, fields):
    """Bulk-load the field tree plus attrs/options/triggers for serialization.

    Returns a dict of four maps (fields, attrs, options, triggers), each
    keyed by field id, so that serialization avoids per-field queries.
    """
    ret = {'fields': {}, 'attrs': {}, 'options': {}, 'triggers': {}}

    fields_ids = []
    for field in fields:
        fields_ids.append(field.id)
        if field.template_id is not None:
            fields_ids.append(field.template_id)

    # Walk the field-group tree breadth-first, one query per level.
    frontier = copy.deepcopy(fields_ids)
    while frontier:
        children = store.find(models.Field,
                              In(models.Field.fieldgroup_id, frontier))
        frontier = []
        for child in children:
            frontier.append(child.id)
            if child.template_id is not None:
                frontier.append(child.template_id)
            ret['fields'].setdefault(child.fieldgroup_id, []).append(child)
        fields_ids.extend(frontier)

    for attr in store.find(models.FieldAttr,
                           In(models.FieldAttr.field_id, fields_ids)):
        ret['attrs'].setdefault(attr.field_id, []).append(attr)

    for option in store.find(models.FieldOption,
                             In(models.FieldOption.field_id, fields_ids)):
        ret['options'].setdefault(option.field_id, []).append(option)

    for trigger in store.find(models.FieldOption,
                              In(models.FieldOption.trigger_field, fields_ids)):
        ret['triggers'].setdefault(trigger.field_id, []).append(trigger)

    return ret
def query_structural_subscriptions(
    what, bug, bugtasks, level, exclude=None):
    """Query into structural subscriptions for a given bug.

    :param what: The fields to fetch. Choose from `Person`,
        `StructuralSubscription`, `BugSubscriptionFilter`, or a combo.
    :param bug: An `IBug`
    :param bugtasks: An iterable of `IBugTask`.
    :param level: A level from `BugNotificationLevel`. Filters below
        this level will be excluded.
    :param exclude: `Person`s to exclude (e.g. direct subscribers).
    """
    from lp.registry.model.person import Person  # Circular.
    # Delegate the filter selection to the helper; it returns None when
    # there is nothing to match, so we can short-circuit with an empty set.
    filter_id_query = (
        _get_structural_subscription_filter_id_query(
            bug, bugtasks, level, exclude))
    if not filter_id_query:
        return EmptyResultSet()
    # Join subscriptions to their filters and subscribers so `what` can
    # select any combination of the three.
    source = IStore(StructuralSubscription).using(
        StructuralSubscription,
        Join(BugSubscriptionFilter,
             BugSubscriptionFilter.structural_subscription_id ==
             StructuralSubscription.id),
        Join(Person, Person.id == StructuralSubscription.subscriberID))
    conditions = In(
        BugSubscriptionFilter.id, filter_id_query)
    return source.find(what, conditions)
def import_receivers(store, submission, receiver_id_list):
    # Attach the selected receivers to the submission, validating the
    # selection against the submission's context.
    context = submission.context

    if not len(receiver_id_list):
        log.err("Receivers required to be selected, not empty")
        # NOTE(review): "almost" here presumably means "at least" —
        # the message text is kept as-is.
        raise errors.SubmissionValidationFailure("needed almost one receiver selected")

    if context.maximum_selectable_receivers and \
            len(receiver_id_list) > context.maximum_selectable_receivers:
        raise errors.InvalidInputFormat("provided an invalid number of receivers")

    for receiver in store.find(Receiver, In(Receiver.id, receiver_id_list)):
        # Every selected receiver must actually belong to this context.
        if context not in receiver.contexts:
            raise errors.InvalidInputFormat("forged receiver selection, you fuzzer! <:")

        try:
            # Best-effort: receivers without an enabled PGP key are skipped
            # (not fatal) when unencrypted submissions are disallowed.
            if not GLSettings.memory_copy.allow_unencrypted and \
                    receiver.pgp_key_status != u'enabled':
                log.err("Encrypted only submissions are supported. Cannot select [%s]" % receiver.id)
                continue
            submission.receivers.add(receiver)
        except Exception as excep:
            # Deliberately broad: a failure to attach one receiver must not
            # abort the whole import.
            log.err("Receiver %s can't be assigned to the tip [%s]" % (receiver.id, excep))
            continue

        log.debug("+receiver [%s] In tip (%s) #%d" %
                  (receiver.name, submission.id, submission.receivers.count()))

    # After skipping ineligible receivers at least one must remain.
    if submission.receivers.count() == 0:
        log.err("Receivers required to be selected, not empty")
        raise errors.SubmissionValidationFailure("needed at least one receiver selected [2]")
def perform_tips_operation(store, receiver_id, operation, rtips_ids):
    """Apply *operation* ('postpone' or 'delete') to a receiver's tips.

    Raises errors.ForbiddenOperation when neither the node settings nor
    the receiver's own grants allow the requested operation.
    """
    receiver = store.find(Receiver, Receiver.id == receiver_id).one()

    # Only tips that belong to this receiver and were explicitly listed.
    selected = store.find(
        ReceiverTip,
        And(ReceiverTip.receiver_id == receiver_id,
            In(ReceiverTip.id, rtips_ids)))

    if operation == 'postpone':
        # Allowed globally or granted to this specific receiver.
        allowed = (GLSettings.memory_copy.can_postpone_expiration or
                   receiver.can_postpone_expiration)
        if not allowed:
            raise errors.ForbiddenOperation
        for tip in selected:
            db_postpone_expiration_date(tip)
    elif operation == 'delete':
        allowed = (GLSettings.memory_copy.can_delete_submission or
                   receiver.can_delete_submission)
        if not allowed:
            raise errors.ForbiddenOperation
        for tip in selected:
            db_delete_rtip(store, tip)

    log.debug("Multiple %s of %d Tips completed" % (operation, len(rtips_ids)))
def db_update_fieldattrs(store, field_id, field_attrs, language):
    """Upsert every attribute in *field_attrs*, then drop stale rows."""
    kept_ids = []
    for attr_name, attr in field_attrs.iteritems():
        kept_ids.append(
            db_update_fieldattr(store, field_id, attr_name, attr, language))

    if kept_ids:
        # Remove this field's attrs that were not just (re)written.
        store.find(models.FieldAttr,
                   And(models.FieldAttr.field_id == field_id,
                       Not(In(models.FieldAttr.id, kept_ids)))).remove()
    else:
        # No attrs supplied: the field keeps none at all.
        store.find(models.FieldAttr,
                   And(models.FieldAttr.field_id == field_id)).remove()
def _getVisiblePrivateSpecificationIDs(self, person, specifications):
    # Return the subset of the given private specifications' ids that
    # *person* may see, resolved with a single access-policy query.
    store = Store.of(specifications[0])
    tables = (
        Specification,
        # Match each spec to the access policy of its pillar
        # (distribution or product) and information type.
        Join(
            AccessPolicy,
            And(
                Or(
                    Specification.distributionID ==
                        AccessPolicy.distribution_id,
                    Specification.productID == AccessPolicy.product_id),
                AccessPolicy.type == Specification.information_type)),
        Join(
            AccessPolicyGrantFlat,
            AccessPolicy.id == AccessPolicyGrantFlat.policy_id
            ),
        # Left join: a grant may be policy-wide (no artifact) or tied to
        # a specific artifact.
        LeftJoin(
            AccessArtifact,
            AccessArtifact.id ==
                AccessPolicyGrantFlat.abstract_artifact_id),
        # Expand team grants to the individual person via membership.
        Join(
            TeamParticipation,
            TeamParticipation.teamID == AccessPolicyGrantFlat.grantee_id))
    spec_ids = [spec.id for spec in specifications]
    return set(store.using(*tables).find(
        Specification.id,
        # Either a policy-wide grant, or an artifact grant naming this spec.
        Or(
            AccessPolicyGrantFlat.abstract_artifact_id == None,
            AccessArtifact.specification == Specification.id),
        TeamParticipation.personID == person.id,
        In(Specification.id, spec_ids)))
def getBugCvesForBugTasks(self, bugtasks, cve_mapper=None):
    """See ICveSet."""
    bugs = bulk.load_related(Bug, bugtasks, ('bugID', ))
    if len(bugs) == 0:
        return []
    store = Store.of(bugtasks[0])
    # Resolve bug->CVE links through the XRef service; keys come back as
    # (type, id-string) pairs.
    xrefs = getUtility(IXRefSet).findFromMany(
        [(u'bug', unicode(bug.id)) for bug in bugs], types=[u'cve'])
    bugcve_ids = set()
    for bug_key in xrefs:
        for cve_key in xrefs[bug_key]:
            bugcve_ids.add((int(bug_key[1]), cve_key[1]))
    # sorted() already returns a list; wrapping it in list() was redundant.
    bugcve_ids = sorted(bugcve_ids)
    # Bulk-fetch the CVE rows and map sequence -> (optionally mapped) CVE.
    cves = store.find(Cve, In(Cve.sequence, [seq for _, seq in bugcve_ids]))
    if cve_mapper is None:
        cvemap = dict((cve.sequence, cve) for cve in cves)
    else:
        cvemap = dict((cve.sequence, cve_mapper(cve)) for cve in cves)
    bugmap = dict((bug.id, bug) for bug in bugs)
    return [(bugmap[bug_id], cvemap[cve_sequence])
            for bug_id, cve_sequence in bugcve_ids]
def perform_tips_operation(store, receiver_id, operation, rtips_ids):
    # Apply *operation* ('postpone' or 'delete') to the internal tips
    # reachable through this receiver's listed ReceiverTips.
    receiver = store.find(models.Receiver,
                          models.Receiver.id == receiver_id).one()

    # Join ReceiverTip -> InternalTip, restricted to this receiver and to
    # the explicitly requested rtip ids.
    for itip in store.find(
            models.InternalTip,
            models.ReceiverTip.receiver_id == receiver_id,
            In(models.ReceiverTip.id, rtips_ids),
            models.InternalTip.id == models.ReceiverTip.internaltip_id):
        if operation == 'postpone':
            # Allowed by the tenant settings or by a per-receiver grant.
            can_postpone_expiration = State.tenant_cache[
                1].can_postpone_expiration or receiver.can_postpone_expiration
            if not can_postpone_expiration:
                raise errors.ForbiddenOperation

            db_postpone_expiration_date(store, itip)
        elif operation == 'delete':
            can_delete_submission = State.tenant_cache[
                1].can_delete_submission or receiver.can_delete_submission
            if not can_delete_submission:
                raise errors.ForbiddenOperation

            db_delete_itip(store, itip)

    log.debug("Multiple %s of %d Tips completed" %
              (operation, len(rtips_ids)))
def db_fix_fields_attrs(store):
    """Synchronize FieldAttr rows with the field_attrs.json descriptors.

    Removes attrs whose names no longer appear in the JSON file and adds
    any newly declared attrs to every matching field.
    """
    # Load the field attributes descriptors.
    # NOTE: use the open() builtin rather than the deprecated Python 2
    # `file` type alias (removed in Python 3).
    field_attrs = {}
    with open(GLSettings.field_attrs_file, 'r') as f:
        json_string = f.read()
        field_attrs = json.loads(json_string)

    # Get the list of keys
    uptodate_keys = []
    for k1 in field_attrs.keys():
        for k2 in field_attrs[k1].keys():
            # db_forge_obj expects the attr name inside the dict itself.
            field_attrs[k1][k2]['name'] = k2
            uptodate_keys.append(k2)

    # Remove keys that have been removed
    store.find(models.FieldAttr,
               Not(In(models.FieldAttr.name, uptodate_keys))).remove()

    # Add keys that have been added
    for field in store.find(models.Field):
        attrs = field_attrs.get(field.type, {})
        for attr_name in attrs.keys():
            if not store.find(
                    models.FieldAttr,
                    And(models.FieldAttr.field_id == field.id,
                        models.FieldAttr.name == attr_name)).one():
                field.attrs.add(
                    models.db_forge_obj(store, models.FieldAttr,
                                        attrs[attr_name]))
def db_receiver_get_wbfile_list(store, itip_id):
    """Serialize every whistleblower file attached to an internal tip."""
    tip_ids = [
        rtip.id for rtip in
        store.find(ReceiverTip, ReceiverTip.internaltip_id == itip_id)
    ]
    files = store.find(WhistleblowerFile,
                       In(WhistleblowerFile.receivertip_id, tip_ids))
    return [receiver_serialize_wbfile(f) for f in files]
def db_serialize_questionnaire_answers(store, usertip):
    # Serialize the answers of a tip's questionnaire, hiding the
    # whistleblower-identity answers from receivers who lack access.
    internaltip = usertip.internaltip

    questionnaire = db_get_archived_questionnaire_schema(
        store, internaltip.questionnaire_hash,
        GLSettings.memory_copy.default_language)

    answers_ids = []
    filtered_answers_ids = []

    for s in questionnaire:
        for f in s['children']:
            if 'key' in f and f['key'] == 'whistleblower_identity':
                # Identity answers are visible when: the viewer is the
                # whistleblower; or the field is not subject to
                # authorization; or the viewing receiver was granted access.
                if isinstance(usertip, models.WhistleblowerTip) or \
                        f['attrs']['visibility_subject_to_authorization']['value'] == False or \
                        (isinstance(usertip, models.ReceiverTip) and
                         usertip.can_access_whistleblower_identity):
                    answers_ids.append(f['id'])
                else:
                    filtered_answers_ids.append(f['id'])
            else:
                answers_ids.append(f['id'])

    answers = store.find(
        models.FieldAnswer,
        And(models.FieldAnswer.internaltip_id == internaltip.id,
            In(models.FieldAnswer.key, answers_ids)))

    return db_serialize_questionnaire_answers_recursively(answers)
def _get_paid_payments(self):
    """Find this group's payments that are in a paid-like status."""
    paid_statuses = [Payment.STATUS_PAID,
                     Payment.STATUS_REVIEWING,
                     Payment.STATUS_CONFIRMED]
    return self.store.find(
        Payment,
        And(Payment.group_id == self.id,
            In(Payment.status, paid_statuses)))
def put(self):
    """Update an existing product from the parsed request arguments."""
    parser = reqparse.RequestParser()
    parser.add_argument('id', type=int, required=True)
    parser.add_argument('name', type=unicode, required=True)
    parser.add_argument('stock', type=int, required=True)
    parser.add_argument('description', type=unicode, required=True)
    parser.add_argument('price', type=float, required=True)
    parser.add_argument('is_available', type=bool, required=True)
    parser.add_argument('categories', type=int, required=True,
                        action='append')
    args = parser.parse_args()

    store = get_default_store()
    product = store.find(Product, Product.id == args['id']).one()
    if product is None:
        return "Fail", 404

    product.name = args['name']
    product.stock = args['stock']
    product.description = args['description']
    product.price = Decimal(args['price'])
    product.is_available = args['is_available']

    # Replace the category set wholesale with the requested ids.
    product.categories.clear()
    for category in store.find(Category, In(Category.id, args['categories'])):
        product.categories.add(category)

    store.flush()
    return "Success", 201
def post(self):
    """Create a new product from the parsed request arguments."""
    parser = reqparse.RequestParser()
    parser.add_argument('name', type=unicode, required=True)
    parser.add_argument('stock', type=int, required=True)
    parser.add_argument('description', type=unicode, required=True)
    parser.add_argument('price', type=float, required=True)
    parser.add_argument('is_available', type=bool, required=True)
    parser.add_argument('categories', type=int, required=True,
                        action='append')
    args = parser.parse_args()

    store = get_default_store()

    product = Product()
    product.name = args['name']
    product.stock = args['stock']
    product.description = args['description']
    product.price = Decimal(args['price'])
    product.is_available = args['is_available']

    # Attach every requested category that actually exists.
    for category in store.find(Category, In(Category.id, args['categories'])):
        product.categories.add(category)

    store.add(product)
    store.commit()
    return "Success", 201
def import_receivers(store, submission, receiver_id_list):
    """Attach the selected receivers to *submission*, validating each one.

    Raises SubmissionValidationFailure when no valid receiver remains and
    InvalidInputFormat on a malformed or forged selection.
    """
    context = submission.context

    if not len(receiver_id_list):
        raise errors.SubmissionValidationFailure(
            "needed almost one receiver selected [1]")

    if context.maximum_selectable_receivers and \
            len(receiver_id_list) > context.maximum_selectable_receivers:
        raise errors.InvalidInputFormat(
            "provided an invalid number of receivers")

    for receiver in store.find(models.Receiver,
                               In(models.Receiver.id, receiver_id_list)):
        # Every selected receiver must actually belong to this context.
        if context not in receiver.contexts:
            raise errors.InvalidInputFormat(
                "forged receiver selection, you fuzzer! <:")

        # When unencrypted submissions are disallowed, selecting a receiver
        # without an enabled PGP key is a hard failure.
        # (The unreachable `continue` that followed this raise was removed.)
        if not GLSettings.memory_copy.allow_unencrypted and receiver.user.pgp_key_status != u'enabled':
            raise errors.SubmissionValidationFailure(
                "the platform does not allow selection of receivers with encryption disabled"
            )

        submission.receivers.add(receiver)

        log.debug("+receiver [%s] In tip (%s) #%d" %
                  (receiver.user.name, submission.id,
                   submission.receivers.count()))

    if submission.receivers.count() == 0:
        raise errors.SubmissionValidationFailure(
            "needed almost one receiver selected [2]")
def linkedToBugs(self, bugs):
    """See `IBranchCollection`."""
    ids = [b.id for b in bugs]
    bug_join = Join(BugBranch, BugBranch.branch == Branch.id)
    return self._filterBy(
        [In(BugBranch.bugID, ids)],
        table=BugBranch,
        join=bug_join,
        symmetric=False)
def db_prepare_receivers_serialization(store, receivers):
    """Bulk-load the users and profile images needed to serialize receivers.

    Returns {'users': {user_id: User}, 'imgs': {file_id: data}}.
    """
    data = {'users': {}, 'imgs': {}}

    receivers_ids = [r.id for r in receivers]

    img_ids = []
    for o in store.find(models.User, In(models.User.id, receivers_ids)):
        data['users'][o.id] = o
        # Skip users without a profile image (img_id is None); a NULL in
        # the In() list would never match a File row anyway.  This matches
        # the guard used by db_prepare_contexts_serialization.
        if o.img_id is not None:
            img_ids.append(o.img_id)

    for o in store.find(models.File, In(models.File.id, img_ids)):
        data['imgs'][o.id] = o.data

    return data
def db_prepare_receivers_serialization(store, receivers):
    """Preload per-receiver users and context memberships in bulk."""
    data = {'users': {}, 'imgs': {}, 'contexts': {}}

    ids = []
    for receiver in receivers:
        ids.append(receiver.id)
        # Default entries so every receiver id is present in the maps.
        data['imgs'][receiver.id] = None
        data['contexts'][receiver.id] = []

    for user in store.find(models.User, In(models.User.id, ids)):
        data['users'][user.id] = user

    memberships = store.find(models.ReceiverContext,
                             In(models.ReceiverContext.receiver_id, ids))
    for membership in memberships:
        data['contexts'][membership.receiver_id].append(membership.context_id)

    return data
class WorkOrderApprovedAndFinishedView(WorkOrderView):
    """A view for approved and finished |workorders|

    This is the same as :class:`.WorkOrderView`, but only approved
    and finished orders are shown here.
    """

    # Restricts the base view's query to the two listed statuses.
    clause = In(WorkOrder.status, [WorkOrder.STATUS_APPROVED,
                                   WorkOrder.STATUS_WORK_FINISHED])
def _fill_categories_combo(self):
    """Populate the category combo, optionally restricted to a name list."""
    wanted = self.categories_for_combo
    if wanted is None:
        categories = self.store.find(WorkOrderCategory)
    else:
        categories = self.store.find(
            WorkOrderCategory, In(WorkOrderCategory.name, wanted))
    self.category.color_attribute = 'color'
    self.category.prefill(
        api.for_combo(categories, empty=_(u"No category")))
def db_prepare_contexts_serialization(store, contexts):
    """Bulk-load picture data and receiver ids for the given contexts."""
    data = {'imgs': {}, 'receivers': {}}

    contexts_ids = [c.id for c in contexts]
    img_ids = [c.img_id for c in contexts if c.img_id is not None]

    for img in store.find(models.File, In(models.File.id, img_ids)):
        data['imgs'][img.id] = img.data

    links = store.find(models.ReceiverContext,
                       In(models.ReceiverContext.context_id, contexts_ids))
    for link in links:
        data['receivers'].setdefault(link.context_id, []).append(
            link.receiver_id)

    return data
def get_playlist_id(group):
    # Derive a stable playlist identifier from the timestamp of the last
    # membership-changing event in the group; the id only changes when the
    # group's composition changes.

    # Find last event in the group that could have changed the playlist
    events = g.store.find(
        GroupEvent,
        (GroupEvent.group == group) &
        In(GroupEvent.event_type, [u'join', u'leave', u'master']))
    last = events.order_by(Desc(GroupEvent.created)).first()
    if last is not None:
        when = last.created
    else:
        # No such event yet: fall back to "now", producing a fresh id.
        when = datetime.datetime.utcnow()
    # NOTE(review): strftime('%s') (epoch seconds) is a platform extension,
    # not a documented strftime directive — confirm it works on all target
    # platforms.  Hashing the raw str also assumes Python 2 semantics.
    return unicode(hashlib.sha1(when.strftime('%s')).hexdigest())
def find(self, **options):
    """
    Searches product histories.

    :param options: optional filters: from_edit_date, to_edit_date,
        from_price, to_price, name (substring match), categories (list),
        include_out_of_stock (defaults to False).
    :return: list of DynamicObject, ordered by edit date.
    """
    from_edit_date = options.get('from_edit_date')
    to_edit_date = options.get('to_edit_date')
    from_price = options.get('from_price')
    to_price = options.get('to_price')
    name = options.get('name')
    categories = options.get('categories')
    include_out_of_stock = options.get('include_out_of_stock')
    if include_out_of_stock is None:
        include_out_of_stock = False

    expressions = []
    if not include_out_of_stock:
        expressions.append(
            ProductsHistoryEntity.product_history_status ==
            ProductsHistoryEntity.ProductHistoryStatusEnum.IN_STOCK)
    if from_edit_date is not None:
        expressions.append(
            ProductsHistoryEntity.product_history_edit_date >= from_edit_date)
    if to_edit_date is not None:
        expressions.append(
            ProductsHistoryEntity.product_history_edit_date <= to_edit_date)
    if from_price is not None:
        expressions.append(
            ProductsHistoryEntity.product_history_price >= from_price)
    if to_price is not None:
        # BUG FIX: this upper bound previously used `>=`, duplicating the
        # from_price filter and never applying a price ceiling.
        expressions.append(
            ProductsHistoryEntity.product_history_price <= to_price)
    if name is not None and name.strip() != "":
        expressions.append(
            Like(ProductsHistoryEntity.product_history_name,
                 "%{0}%".format(name)))
    if categories is not None and len(categories) > 0:
        expressions.append(
            In(ProductsHistoryEntity.product_history_category, categories))

    store = get_current_transaction_store()
    entities = store.find(
        ProductsHistoryEntity, And(*expressions)).order_by(
            ProductsHistoryEntity.product_history_edit_date)

    results = []
    for entity in entities:
        results.append(DynamicObject(entity_to_dic(entity)))
    return results
def getNotificationsToSend(self):
    """See IBugNotificationSet."""
    # We preload the bug activity and the message in order to
    # try to reduce subsequent database calls: try to get direct
    # dependencies at once. We then also pre-load the pertinent bugs,
    # people (with their own dependencies), and message chunks before
    # returning the notifications that should be processed.
    # Sidestep circular reference.
    from lp.bugs.model.bug import Bug
    store = IStore(BugNotification)
    source = store.using(
        BugNotification,
        Join(Message, BugNotification.message == Message.id),
        LeftJoin(BugActivity,
                 BugNotification.activity == BugActivity.id))
    # Newest-first within each bug; the coalescing pass below relies on
    # this ordering.
    results = list(
        source.find(
            (BugNotification, BugActivity, Message),
            BugNotification.status == BugNotificationStatus.PENDING,
            BugNotification.date_emailed == None).order_by(
                'BugNotification.bug', '-BugNotification.id'))
    interval = timedelta(
        minutes=int(config.malone.bugnotification_interval))
    time_limit = (datetime.now(pytz.UTC) - interval)
    last_omitted_notification = None
    pending_notifications = []
    people_ids = set()
    bug_ids = set()
    for notification, ignore, ignore in results:
        # Skip notifications newer than the configured interval; also
        # coalesce runs of notifications from the same person on the same
        # bug that arrived within `interval` of an omitted one, so only
        # the oldest of each burst is sent.
        if notification.message.datecreated > time_limit:
            last_omitted_notification = notification
        elif (last_omitted_notification is not None and
              notification.message.ownerID ==
              last_omitted_notification.message.ownerID and
              notification.bugID == last_omitted_notification.bugID and
              last_omitted_notification.message.datecreated -
              notification.message.datecreated < interval):
            last_omitted_notification = notification
        if last_omitted_notification != notification:
            last_omitted_notification = None
            pending_notifications.append(notification)
            people_ids.add(notification.message.ownerID)
            bug_ids.add(notification.bugID)
    # Now we do some calls that are purely for caching.
    # Converting these into lists forces the queries to execute.
    if pending_notifications:
        list(
            getUtility(IPersonSet).getPrecachedPersonsFromIDs(
                list(people_ids),
                need_validity=True,
                need_preferred_email=True))
        list(IStore(Bug).find(Bug, In(Bug.id, list(bug_ids))))
    # Results were gathered newest-first; callers expect oldest-first.
    pending_notifications.reverse()
    return pending_notifications