def addNotification(self, bug, is_comment, message, recipients, activity,
                    deferred=False):
    """See `IBugNotificationSet`."""
    if deferred:
        status = BugNotificationStatus.DEFERRED
    else:
        if not recipients:
            return
        status = BugNotificationStatus.PENDING
    bug_notification = BugNotification(
        bug=bug, is_comment=is_comment, message=message,
        date_emailed=None, activity=activity, status=status)
    store = Store.of(bug_notification)
    # XXX jamesh 2008-05-21: these flushes are to fix ordering
    # problems in the bugnotification-sending.txt tests.
    store.flush()
    bulk.create(
        (BugNotificationRecipient.bug_notification,
         BugNotificationRecipient.person,
         BugNotificationRecipient.reason_body,
         BugNotificationRecipient.reason_header),
        [(bug_notification, recipient) + recipients.getReason(recipient)
         for recipient in recipients])
    bulk.create(
        (BugNotificationFilter.bug_notification,
         BugNotificationFilter.bug_subscription_filter),
        [(bug_notification, filter)
         for filter in recipients.subscription_filters])
    return bug_notification
def bulkAddActivity(self, references,
                    result=BugWatchActivityStatus.SYNC_SUCCEEDED,
                    oops_id=None):
    """See `IBugWatchSet`."""
    bulk.create(
        (BugWatchActivity.bug_watch_id, BugWatchActivity.result,
         BugWatchActivity.oops_id),
        [(bug_watch_id, result, ensure_unicode(oops_id))
         for bug_watch_id in set(get_bug_watch_ids(references))])
def createMultiple(
    cls,
    copy_tasks,
    requester,
    copy_policy=PackageCopyPolicy.INSECURE,
    include_binaries=False,
    sponsored=None,
    unembargo=False,
    auto_approve=False,
):
    """See `IPlainPackageCopyJobSource`."""
    store = IMasterStore(Job)
    job_ids = Job.createMultiple(store, len(copy_tasks), requester)
    job_contents = [
        cls._composeJobInsertionTuple(
            copy_policy, include_binaries, job_id, task, sponsored,
            unembargo, auto_approve)
        for job_id, task in zip(job_ids, copy_tasks)
    ]
    return bulk.create(
        (
            PackageCopyJob.job_type,
            PackageCopyJob.target_distroseries,
            PackageCopyJob.copy_policy,
            PackageCopyJob.source_archive,
            PackageCopyJob.target_archive,
            PackageCopyJob.package_name,
            PackageCopyJob.job_id,
            PackageCopyJob.metadata,
        ),
        job_contents,
        get_primary_keys=True,
    )
def ensure(cls, concrete_artifacts):
    """See `IAccessArtifactSource`."""
    from lp.blueprints.interfaces.specification import ISpecification
    from lp.bugs.interfaces.bug import IBug
    from lp.code.interfaces.branch import IBranch
    from lp.code.interfaces.gitrepository import IGitRepository

    existing = list(cls.find(concrete_artifacts))
    if len(existing) == len(concrete_artifacts):
        return existing

    # Not everything exists. Create missing ones.
    needed = (
        set(concrete_artifacts) -
        set(abstract.concrete_artifact for abstract in existing))

    insert_values = []
    for concrete in needed:
        if IBug.providedBy(concrete):
            insert_values.append((concrete, None, None, None))
        elif IBranch.providedBy(concrete):
            insert_values.append((None, concrete, None, None))
        elif IGitRepository.providedBy(concrete):
            insert_values.append((None, None, concrete, None))
        elif ISpecification.providedBy(concrete):
            insert_values.append((None, None, None, concrete))
        else:
            raise ValueError("%r is not a supported artifact" % concrete)
    new = create(
        (cls.bug, cls.branch, cls.gitrepository, cls.specification),
        insert_values, get_objects=True)
    return list(existing) + new
def create_multiple_jobs(derived_series, parent_series):
    """Create `DistroSeriesDifferenceJob`s between parent and derived series.

    :param derived_series: A `DistroSeries` that is assumed to be derived
        from another one.
    :param parent_series: A `DistroSeries` that is a parent of
        `derived_series`.
    :return: A list of newly-created `DistributionJob` ids.
    """
    store = IStore(SourcePackagePublishingHistory)
    spn_ids = store.find(
        SourcePackagePublishingHistory.sourcepackagenameID,
        SourcePackagePublishingHistory.distroseries == derived_series.id,
        SourcePackagePublishingHistory.status.is_in(
            active_publishing_status))
    spn_ids = list(spn_ids)
    if len(spn_ids) == 0:
        return []
    job_ids = Job.createMultiple(store, len(spn_ids))
    return bulk.create(
        (DistributionJob.distribution, DistributionJob.distroseries,
         DistributionJob.job_type, DistributionJob.job_id,
         DistributionJob.metadata),
        [(derived_series.distribution, derived_series,
          DistributionJobType.DISTROSERIESDIFFERENCE, job_id,
          make_metadata(spn_id, parent_series.id))
         for job_id, spn_id in zip(job_ids, spn_ids)],
        get_primary_keys=True)
def test_zero_values_is_noop(self):
    # create()ing 0 rows is a no-op.
    with StormStatementRecorder() as recorder:
        self.assertEqual(
            [],
            bulk.create((BugSubscription.bug,), [], get_objects=True))
    self.assertThat(recorder, HasQueryCount(Equals(0)))
def ensure(cls, concrete_artifacts):
    """See `IAccessArtifactSource`."""
    from lp.blueprints.interfaces.specification import ISpecification
    from lp.bugs.interfaces.bug import IBug
    from lp.code.interfaces.branch import IBranch

    existing = list(cls.find(concrete_artifacts))
    if len(existing) == len(concrete_artifacts):
        return existing

    # Not everything exists. Create missing ones.
    needed = (
        set(concrete_artifacts) -
        set(abstract.concrete_artifact for abstract in existing))

    insert_values = []
    for concrete in needed:
        if IBug.providedBy(concrete):
            insert_values.append((concrete, None, None))
        elif IBranch.providedBy(concrete):
            insert_values.append((None, concrete, None))
        elif ISpecification.providedBy(concrete):
            insert_values.append((None, None, concrete))
        else:
            raise ValueError("%r is not a supported artifact" % concrete)
    new = create(
        (cls.bug, cls.branch, cls.specification),
        insert_values, get_objects=True)
    return list(existing) + new
def createMultiple(cls, copy_tasks, requester,
                   copy_policy=PackageCopyPolicy.INSECURE,
                   include_binaries=False, sponsored=None,
                   unembargo=False, auto_approve=False, silent=False):
    """See `IPlainPackageCopyJobSource`."""
    store = IMasterStore(Job)
    job_ids = Job.createMultiple(store, len(copy_tasks), requester)
    job_contents = [
        cls._composeJobInsertionTuple(
            copy_policy, include_binaries, job_id, task, sponsored,
            unembargo, auto_approve, silent)
        for job_id, task in zip(job_ids, copy_tasks)]
    return bulk.create(
        (PackageCopyJob.job_type, PackageCopyJob.target_distroseries,
         PackageCopyJob.copy_policy, PackageCopyJob.source_archive,
         PackageCopyJob.target_archive, PackageCopyJob.package_name,
         PackageCopyJob.job_id, PackageCopyJob.metadata),
        job_contents, get_primary_keys=True)
def storeRemoteProductsAndComponents(self, bz_bugtracker, lp_bugtracker):
    """Store parsed product/component data from bz_bugtracker."""
    components_to_add = []
    for product in bz_bugtracker.products.itervalues():
        # Look up the component group id from Launchpad for the product
        # if it already exists.  Otherwise, add it.
        lp_component_group = lp_bugtracker.getRemoteComponentGroup(
            product['name'])
        if lp_component_group is None:
            lp_component_group = lp_bugtracker.addRemoteComponentGroup(
                product['name'])
            if lp_component_group is None:
                self.logger.warning("Failed to add new component group")
                continue
        else:
            for component in lp_component_group.components:
                if (component.name in product['components'] or
                        component.is_visible == False or
                        component.is_custom == True):
                    # We already know something about this component,
                    # or a user has configured it, so ignore it.
                    del product['components'][component.name]
                else:
                    # Component is now missing from Bugzilla,
                    # so drop it here too.
                    store = IStore(BugTrackerComponent)
                    store.find(
                        BugTrackerComponent,
                        BugTrackerComponent.id == component.id,
                        ).remove()

        # The remaining components in the collection will need to be
        # added to launchpad.  Record them for now.
        for component in product['components'].values():
            components_to_add.append(
                (component['name'], lp_component_group, True, False))

    if len(components_to_add) > 0:
        self.logger.debug("...Inserting components into database")
        bulk.create(
            (BugTrackerComponent.name,
             BugTrackerComponent.component_group,
             BugTrackerComponent.is_visible,
             BugTrackerComponent.is_custom),
            components_to_add)
        transaction.commit()
    self.logger.debug("...Done")
def test_null_reference(self):
    # create() handles None as a Reference value.
    job = IStore(Job).add(Job())
    wanted = [(None, job, BranchJobType.RECLAIM_BRANCH_SPACE)]
    [branchjob] = bulk.create(
        (BranchJob.branch, BranchJob.job, BranchJob.job_type),
        wanted, get_objects=True)
    self.assertEqual(
        wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)])
def createForTeams(cls, teams):
    insert_values = []
    for team in teams:
        if team is None or not team.is_team:
            raise ValueError("A team must be specified")
        insert_values.append((None, None, None, team))
    return create(
        (cls.product, cls.distribution, cls.type, cls.person),
        insert_values, get_objects=True)
def create(self, xrefs):
    # All references are currently to local objects, so add
    # backlinks as well to keep queries in both directions quick.
    # The *_id_int columns are also set if the ID looks like an int.
    rows = []
    for from_, tos in xrefs.items():
        for to, props in tos.items():
            rows.append(
                (from_[0], from_[1], _int_or_none(from_[1]),
                 to[0], to[1], _int_or_none(to[1]),
                 props.get('creator'), props.get('date_created', UTC_NOW),
                 props.get('metadata')))
            rows.append(
                (to[0], to[1], _int_or_none(to[1]),
                 from_[0], from_[1], _int_or_none(from_[1]),
                 props.get('creator'), props.get('date_created', UTC_NOW),
                 props.get('metadata')))
    bulk.create(
        (XRef.from_type, XRef.from_id, XRef.from_id_int, XRef.to_type,
         XRef.to_id, XRef.to_id_int, XRef.creator, XRef.date_created,
         XRef.metadata),
        rows)
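# Usage sketch for the method above: `xrefs` is a nested mapping of
# (type, id) string tuples to per-link property dicts, as the loop
# implies.  The type names, ids, and the IXRefSet lookup below are
# illustrative assumptions, not taken from this file; `person` stands
# in for some Person row.
xrefs = {
    ('bug', '1'): {
        ('question', '123'): {'creator': person},
        ('cve', '2024-0001'): {'metadata': {'via': 'import'}},
    },
}
getUtility(IXRefSet).create(xrefs)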
def createMultiple(self, store, num_jobs, requester=None):
    """Create multiple `Job`s at once.

    :param store: `Store` to create the jobs in.
    :param num_jobs: Number of `Job`s to create.
    :param requester: The `IPerson` requesting the jobs.
    :return: An iterable of `Job.id` values for the new jobs.
    """
    return bulk.create(
        (Job._status, Job.requester),
        [(JobStatus.WAITING, requester) for i in range(num_jobs)],
        get_primary_keys=True)
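# A minimal usage sketch for createMultiple(), following the calling
# convention seen elsewhere in this section (Job.createMultiple(store, n)).
# Resolving the returned ids back to Job rows via store.get(), and the
# Job.status property, are assumptions for this sketch.
store = IMasterStore(Job)
job_ids = list(Job.createMultiple(store, 3))
jobs = [store.get(Job, job_id) for job_id in job_ids]
assert all(job.status == JobStatus.WAITING for job in jobs)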
def test_can_return_ids(self):
    # create() can be asked to return the created IDs instead of objects.
    job = IStore(Job).add(Job())
    IStore(Job).flush()
    wanted = [(None, job, BranchJobType.RECLAIM_BRANCH_SPACE)]
    with StormStatementRecorder() as recorder:
        [created_id] = bulk.create(
            (BranchJob.branch, BranchJob.job, BranchJob.job_type),
            wanted, get_primary_keys=True)
    self.assertThat(recorder, HasQueryCount(Equals(1)))
    [reclaimjob] = ReclaimBranchSpaceJob.iterReady()
    self.assertEqual(created_id, reclaimjob.context.id)
def test_sql_passed_through(self):
    # create() passes SQL() expressions through untouched.
    bug = self.factory.makeBug()
    person = self.factory.makePerson()
    [sub] = bulk.create(
        (BugSubscription.bug, BugSubscription.person,
         BugSubscription.subscribed_by, BugSubscription.date_created,
         BugSubscription.bug_notification_level),
        [(bug, person, person,
          SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"),
          BugNotificationLevel.LIFECYCLE)],
        get_objects=True)
    self.assertEqual(get_transaction_timestamp(), sub.date_created)
def apply_bugsummary_changes(target, added, updated, removed):
    """Apply a set of BugSummary changes to the DB."""
    bits = _get_bugsummary_constraint_bits(target)
    target_key = tuple(map(
        bits.__getitem__,
        ('product_id', 'productseries_id', 'distribution_id',
         'distroseries_id', 'sourcepackagename_id')))
    target_cols = (
        RawBugSummary.product_id, RawBugSummary.productseries_id,
        RawBugSummary.distribution_id, RawBugSummary.distroseries_id,
        RawBugSummary.sourcepackagename_id)
    key_cols = (
        RawBugSummary.status, RawBugSummary.milestone_id,
        RawBugSummary.importance, RawBugSummary.has_patch,
        RawBugSummary.tag, RawBugSummary.viewed_by_id,
        RawBugSummary.access_policy_id)

    # Postgres doesn't do bulk updates, so do a delete+add.
    for key, count in updated.iteritems():
        removed.append(key)
        added[key] = count

    # Delete any excess rows.  We do it in batches of 100 to avoid
    # enormous ORs.
    while removed:
        chunk = removed[:100]
        removed = removed[100:]
        exprs = [
            map(lambda k_v: k_v[0] == k_v[1], zip(key_cols, key))
            for key in chunk]
        IStore(RawBugSummary).find(
            RawBugSummary,
            Or(*[And(*expr) for expr in exprs]),
            *get_bugsummary_constraint(target)).remove()

    # Add any new rows.  We know this scales up to tens of thousands,
    # so just do it in one hit.
    if added:
        create(
            target_cols + key_cols + (RawBugSummary.count,),
            [target_key + key + (count,)
             for key, count in added.iteritems()])
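# Illustrative inputs for apply_bugsummary_changes(): each key is a tuple
# matching key_cols above (status, milestone_id, importance, has_patch,
# tag, viewed_by_id, access_policy_id) and each value a row count.  The
# enum values and `target` below are assumptions for the sketch; note
# that `removed` must be a list, since updated keys are appended to it
# for the delete+add pass.
added = {
    (BugTaskStatus.NEW, None, BugTaskImportance.HIGH,
     False, None, None, None): 3,
}
updated = {
    (BugTaskStatus.TRIAGED, None, BugTaskImportance.LOW,
     False, u'ui', None, None): 7,
}
removed = [
    (BugTaskStatus.FIXRELEASED, None, BugTaskImportance.LOW,
     False, None, None, None),
]
apply_bugsummary_changes(target, added, updated, removed)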
def create(cls, policies):
    from lp.registry.interfaces.distribution import IDistribution
    from lp.registry.interfaces.product import IProduct

    insert_values = []
    for pillar, type in policies:
        if IProduct.providedBy(pillar):
            insert_values.append((pillar, None, type))
        elif IDistribution.providedBy(pillar):
            insert_values.append((None, pillar, type))
        else:
            raise ValueError("%r is not a supported pillar" % pillar)
    return create(
        (cls.product, cls.distribution, cls.type), insert_values,
        get_objects=True)
def test_load_can_be_skipped(self):
    # create() can be told not to load the created rows.
    job = IStore(Job).add(Job())
    IStore(Job).flush()
    wanted = [(None, job, BranchJobType.RECLAIM_BRANCH_SPACE)]
    with StormStatementRecorder() as recorder:
        self.assertIs(
            None,
            bulk.create(
                (BranchJob.branch, BranchJob.job, BranchJob.job_type),
                wanted, get_objects=False))
    self.assertThat(recorder, HasQueryCount(Equals(1)))
    [reclaimjob] = ReclaimBranchSpaceJob.iterReady()
    branchjob = reclaimjob.context
    self.assertEqual(
        wanted, [(branchjob.branch, branchjob.job, branchjob.job_type)])
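# To summarise the three return modes these tests exercise, a sketch
# using the same models.  The reclaim_rows() helper is hypothetical;
# it builds a fresh Job per row so no Job is shared between BranchJobs.
def reclaim_rows(n):
    store = IStore(Job)
    return [
        (None, store.add(Job()), BranchJobType.RECLAIM_BRANCH_SPACE)
        for _ in range(n)]

cols = (BranchJob.branch, BranchJob.job, BranchJob.job_type)
bulk.create(cols, reclaim_rows(1), get_objects=False)  # one INSERT, None
[branchjob] = bulk.create(cols, reclaim_rows(1), get_objects=True)
[new_id] = bulk.create(cols, reclaim_rows(1), get_primary_keys=True)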
def test_references_and_enums(self):
    # create() correctly compiles plain types, enums and references.
    bug = self.factory.makeBug()
    people = [self.factory.makePerson() for i in range(5)]
    wanted = [
        (bug, person, person, datetime.datetime.now(UTC),
         BugNotificationLevel.LIFECYCLE)
        for person in people]
    with StormStatementRecorder() as recorder:
        subs = bulk.create(
            (BugSubscription.bug, BugSubscription.person,
             BugSubscription.subscribed_by, BugSubscription.date_created,
             BugSubscription.bug_notification_level),
            wanted, get_objects=True)
    self.assertThat(recorder, HasQueryCount(Equals(2)))
    self.assertContentEqual(
        wanted,
        ((sub.bug, sub.person, sub.subscribed_by, sub.date_created,
          sub.bug_notification_level) for sub in subs))
def grant(cls, grants):
    """See `IAccessPolicyGrantSource`."""
    return create(
        (cls.policy, cls.grantee, cls.grantor), grants,
        get_objects=True)
def grant(cls, grants):
    """See `IAccessArtifactGrantSource`."""
    return create(
        (cls.abstract_artifact, cls.grantee, cls.grantor), grants,
        get_objects=True)
def create(cls, links):
    """See `IAccessPolicyArtifactSource`."""
    return create(
        (cls.abstract_artifact, cls.policy), links, get_objects=True)
def newFromBazaarRevisions(self, revisions):
    """See `IRevisionSet`."""
    # Find all author names for these revisions.
    author_names = []
    for bzr_revision in revisions:
        authors = bzr_revision.get_apparent_authors()
        try:
            author = authors[0]
        except IndexError:
            author = None
        author_names.append(author)
    # Get or make every RevisionAuthor for these revisions.
    revision_authors = dict(
        (name, author.id) for name, author in
        self.acquireRevisionAuthors(author_names).items())

    # Collect all data for making Revision objects.
    data = []
    for bzr_revision, author_name in zip(revisions, author_names):
        revision_id = bzr_revision.revision_id
        revision_date = self._timestampToDatetime(bzr_revision.timestamp)
        revision_author = revision_authors[author_name]
        data.append(
            (revision_id, bzr_revision.message, revision_date,
             revision_author))
    # Create all Revision objects.
    db_revisions = create(
        (Revision.revision_id, Revision.log_body, Revision.revision_date,
         Revision.revision_author_id),
        data, get_objects=True)

    # Map revision_id to Revision database ID.
    revision_db_id = dict(
        (rev.revision_id, rev.id) for rev in db_revisions)

    # Collect all data for making RevisionParent and RevisionProperty
    # objects.
    parent_data = []
    property_data = []
    for bzr_revision in revisions:
        db_id = revision_db_id[bzr_revision.revision_id]
        # Property data: revision DB id, name, value.
        for name, value in bzr_revision.properties.iteritems():
            # pristine-tar properties can be huge, and storing them
            # in the database provides no value.  Exclude them.
            if name.startswith('deb-pristine-delta'):
                continue
            property_data.append((db_id, name, value))
        parent_ids = bzr_revision.parent_ids
        # Parent data: revision DB id, sequence, revision_id.
        seen_parents = set()
        for sequence, parent_id in enumerate(parent_ids):
            if parent_id in seen_parents:
                continue
            seen_parents.add(parent_id)
            parent_data.append((db_id, sequence, parent_id))
    # Create all RevisionParent objects.
    create(
        (RevisionParent.revisionID, RevisionParent.sequence,
         RevisionParent.parent_id),
        parent_data)
    # Create all RevisionProperty objects.
    create(
        (RevisionProperty.revisionID, RevisionProperty.name,
         RevisionProperty.value),
        property_data)
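# Design note on the snippet above: only the Revision insert asks for
# get_objects=True, because the RevisionParent and RevisionProperty rows
# need the new Revision ids to build their tuples.  The two child inserts
# skip loading entirely, so the whole batch costs three bulk INSERTs plus
# a single load of the Revision rows.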
def rebuild_bugsummary_for_target(target, log):
    log.debug("Rebuilding %s" % format_target(target))
    existing = dict(
        (v[:-1], v[-1]) for v in get_bugsummary_rows(target))
    expected = dict(
        (v[:-1], v[-1]) for v in calculate_bugsummary_rows(target))
    added, updated, removed = calculate_bugsummary_changes(
        existing, expected)
    if added:
        log.debug('Added %r' % added)
    if updated:
        log.debug('Updated %r' % updated)
    if removed:
        log.debug('Removed %r' % removed)
    apply_bugsummary_changes(target, added, updated, removed)