def _getInvalidTokens(self):
    """Return all invalid tokens.

    A token is invalid if it is active and the token owner is *not* a
    subscriber to the archive that the token is for. The subscription
    can be either direct or through a team.
    """
    # First we grab all the active tokens for which there is a
    # matching current archive subscription for a team of which the
    # token owner is a member.
    store = IStore(ArchiveSubscriber)
    valid_tokens = store.find(
        ArchiveAuthToken,
        ArchiveAuthToken.date_deactivated == None,
        ArchiveAuthToken.archive_id == ArchiveSubscriber.archive_id,
        ArchiveSubscriber.status == ArchiveSubscriberStatus.CURRENT,
        ArchiveSubscriber.subscriber_id == TeamParticipation.teamID,
        TeamParticipation.personID == ArchiveAuthToken.person_id)
    # We can then evaluate the invalid tokens by the difference of
    # all active tokens and valid tokens.
    all_active_tokens = store.find(
        ArchiveAuthToken,
        ArchiveAuthToken.date_deactivated == None)
    return all_active_tokens.difference(valid_tokens)

def getForDistroSeries(distroseries, since=None, source_package_name=None):
    """See `IDistroSeriesDifferenceCommentSource`."""
    # Avoid circular imports.
    from lp.registry.model.distroseriesdifference import (
        DistroSeriesDifference,
        )
    store = IStore(DistroSeriesDifferenceComment)
    DSD = DistroSeriesDifference
    DSDComment = DistroSeriesDifferenceComment
    conditions = [
        DSDComment.distro_series_difference_id == DSD.id,
        DSD.derived_series_id == distroseries.id,
        ]
    if source_package_name is not None:
        conditions += [
            SourcePackageName.id == DSD.source_package_name_id,
            SourcePackageName.name == source_package_name,
            ]
    if since is not None:
        older_messages = store.find(
            Message.id, Message.datecreated < since).order_by(
                Desc(Message.datecreated))
        preceding_message = older_messages.first()
        if preceding_message is not None:
            conditions.append(DSDComment.message_id > preceding_message)
    return store.find(DSDComment, *conditions).order_by(
        DSDComment.message_id)

def _getInvalidTokens(self):
    """Return all invalid tokens.

    A token is invalid if it is active and the token owner is *not* a
    subscriber to the archive that the token is for. The subscription
    can be either direct or through a team.
    """
    # First we grab all the active tokens for which there is a
    # matching current archive subscription for a team of which the
    # token owner is a member.
    store = IStore(ArchiveSubscriber)
    valid_tokens = store.find(
        ArchiveAuthToken,
        ArchiveAuthToken.name == None,
        ArchiveAuthToken.date_deactivated == None,
        ArchiveAuthToken.archive_id == ArchiveSubscriber.archive_id,
        ArchiveSubscriber.status == ArchiveSubscriberStatus.CURRENT,
        ArchiveSubscriber.subscriber_id == TeamParticipation.teamID,
        TeamParticipation.personID == ArchiveAuthToken.person_id)
    # We can then evaluate the invalid tokens by the difference of
    # all active tokens and valid tokens.
    all_active_tokens = store.find(
        ArchiveAuthToken,
        ArchiveAuthToken.name == None,
        ArchiveAuthToken.date_deactivated == None)
    return all_active_tokens.difference(valid_tokens)

def getByPackageAndName(self, package_name, name):
    """See `IHWDriverSet`."""
    store = IStore(HWDriver)
    if package_name in (None, ""):
        return store.find(
            HWDriver,
            Or(HWDriver.package_name == None,
               HWDriver.package_name == ""),
            HWDriver.name == name).one()
    else:
        return store.find(
            HWDriver,
            HWDriver.package_name == package_name,
            HWDriver.name == name).one()

def getCounts(self):
    """Return the number of rows in core revision-related tables.

    :return: (num_revisions, num_branch_revisions,
        num_revision_parents, num_revision_authors)
    """
    store = IStore(Revision)
    return (
        store.find(Revision).count(),
        store.find(BranchRevision).count(),
        store.find(RevisionParent).count(),
        store.find(RevisionAuthor).count())

def test_ensure_spph_dsp_in_db_exists(self):
    # The DSP.ensure() class method does not create duplicate
    # persistent instances; it skips the query to create the DSP.
    store = IStore(DistributionSourcePackageInDatabase)
    start_count = store.find(DistributionSourcePackageInDatabase).count()
    spph = self.factory.makeSourcePackagePublishingHistory()
    DistributionSourcePackage.ensure(spph)
    new_count = store.find(DistributionSourcePackageInDatabase).count()
    self.assertEqual(start_count + 1, new_count)
    # Ensuring the same publication again must not add another row.
    DistributionSourcePackage.ensure(spph)
    final_count = store.find(DistributionSourcePackageInDatabase).count()
    self.assertEqual(new_count, final_count)

class RevisionCacheTestCase(TestCaseWithFactory):
    """Base class for RevisionCache tests."""

    layer = DatabaseFunctionalLayer

    def setUp(self):
        # Login as an admin as we don't care about permissions here.
        TestCaseWithFactory.setUp(self, '*****@*****.**')
        self.store = IStore(RevisionCache)
        # There should be no RevisionCache entries in the test data.
        assert self.store.find(RevisionCache).count() == 0

    def _getRevisionCache(self):
        return list(self.store.find(RevisionCache)
            .order_by(RevisionCache.revision_id))

def test_with_snapbuild_metadata_as_none_set_status(self):
    db_build = self.factory.makeSnapBuild()
    unsecure_db_build = removeSecurityProxy(db_build)
    unsecure_db_build.store_upload_metadata = None
    store = IStore(SnapBuild)
    store.flush()
    loaded_build = store.find(SnapBuild, id=unsecure_db_build.id).one()
    job = SnapStoreUploadJob.create(loaded_build)
    job.status_url = 'http://example.org'
    store.flush()
    loaded_build = store.find(SnapBuild, id=unsecure_db_build.id).one()
    self.assertEqual(
        'http://example.org',
        loaded_build.store_upload_metadata['status_url'])

def query_pofiletranslator(self, pofile, person):
    """Query `POFileTranslator` for a specific record.

    :return: Storm result set.
    """
    store = IStore(pofile)
    return store.find(POFileTranslator, pofile=pofile, person=person)

def get_files_to_parse(file_paths):
    """Return an iterator of file and position where reading should start.

    The lines read from that position onwards will be the ones that have
    not been parsed yet.

    :param file_paths: The paths to the files.
    """
    store = IStore(ParsedApacheLog)
    for file_path in file_paths:
        fd, file_size = get_fd_and_file_size(file_path)
        first_line = unicode(fd.readline())
        parsed_file = store.find(
            ParsedApacheLog, first_line=first_line).one()
        position = 0
        if parsed_file is not None:
            # This file has been parsed already; we'll now check if
            # there's anything in it that hasn't been parsed yet.
            if parsed_file.bytes_read >= file_size:
                # There's nothing new in it for us to parse, so just
                # skip it.
                fd.close()
                continue
            else:
                # This one has stuff we haven't parsed yet, so we'll
                # just parse what's new.
                position = parsed_file.bytes_read
        yield fd, position

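# A hedged usage sketch for get_files_to_parse above; `log_paths` is an
# assumed list of Apache log file paths. Each yielded (fd, position) pair
# is an open file plus the byte offset where unparsed content begins, so
# the caller seeks there before reading.
for fd, position in get_files_to_parse(log_paths):
    fd.seek(position)
    for line in fd:
        pass  # parse only the lines not seen in earlier runs
    fd.close()
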
def load_referencing(object_type, owning_objects, reference_keys,
                     extra_conditions=[]):
    """Load objects of object_type that reference owning_objects.

    Note that complex types like Person are best loaded through dedicated
    helpers that can eager load other related things (e.g. validity for
    Person).

    :param object_type: The object type to load - e.g. BranchSubscription.
    :param owning_objects: The objects which are referenced. E.g.
        [Branch()] At this point, all the objects should be of the same
        type, but that constraint could be lifted in future.
    :param reference_keys: A list of attributes that should be used to
        select object_type keys. e.g. ['branchID']
    :param extra_conditions: A list of Storm clauses that will be used in
        the final query.
    :return: A list of object_type where any of reference_keys referred
        to the primary key of any of owning_objects.
    """
    store = IStore(object_type)
    if type(owning_objects) not in (list, tuple):
        owning_objects = tuple(owning_objects)
    if not owning_objects:
        return []
    exemplar = owning_objects[0]
    primary_key = _primary_key(get_type(exemplar))
    attribute = primary_key.name
    ids = set(map(attrgetter(attribute), owning_objects))
    conditions = []
    # Note to future self doing perf tuning: may want to make ids a WITH
    # clause.
    for column in map(partial(getattr, object_type), reference_keys):
        conditions.append(column.is_in(ids))
    return list(store.find(object_type, Or(conditions), *extra_conditions))

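# A minimal usage sketch built from load_referencing's own docstring
# example; `branch_a` and `branch_b` are assumed, pre-existing Branch
# instances. A single query fetches every BranchSubscription whose
# 'branchID' column references either branch.
branches = [branch_a, branch_b]
subscriptions = load_referencing(BranchSubscription, branches, ['branchID'])
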
def getDeferredNotifications(self):
    """See `IBugNotificationSet`."""
    store = IStore(BugNotification)
    results = store.find(
        BugNotification,
        BugNotification.date_emailed == None,
        BugNotification.status == BugNotificationStatus.DEFERRED)
    return results

def getRevisionsNeedingKarmaAllocated(limit=None):
    """See `IRevisionSet`."""
    store = IStore(Revision)
    results = store.find(
        Revision, Revision.karma_allocated == False)[:limit]
    return results

def most_recent_comments(dsds):
    """The most recent comments for the given `DistroSeriesDifference`s.

    Returns an `IResultSet` that yields a single column of
    `DistroSeriesDifferenceComment`.

    :param dsds: An iterable of `DistroSeriesDifference` instances.
    """
    columns = (
        DistroSeriesDifferenceComment,
        Message,
        )
    conditions = And(
        DistroSeriesDifferenceComment.distro_series_difference_id.is_in(
            dsd.id for dsd in dsds),
        Message.id == DistroSeriesDifferenceComment.message_id)
    order_by = (
        DistroSeriesDifferenceComment.distro_series_difference_id,
        Desc(DistroSeriesDifferenceComment.id),
        )
    distinct_on = (
        DistroSeriesDifferenceComment.distro_series_difference_id,
        )
    store = IStore(DistroSeriesDifferenceComment)
    comments = store.find(
        columns, conditions).order_by(*order_by).config(
            distinct=distinct_on)
    return DecoratedResultSet(comments, itemgetter(0))

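# A hedged usage sketch; `dsds` is an assumed iterable of
# DistroSeriesDifference rows. Because the query above uses DISTINCT ON
# (distro_series_difference_id) ordered by descending comment id, at most
# one comment (the newest) comes back per difference.
latest = list(most_recent_comments(dsds))
assert len(latest) <= len(dsds)
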
def getBugWatchesForRemoteBug(self, remote_bug, bug_watch_ids=None):
    """See `IBugWatchSet`."""
    query = IStore(BugWatch).find(
        BugWatch, BugWatch.remotebug == remote_bug)
    if bug_watch_ids is not None:
        query = query.find(BugWatch.id.is_in(bug_watch_ids))
    return query

def create_multiple_jobs(derived_series, parent_series):
    """Create `DistroSeriesDifferenceJob`s between parent and derived
    series.

    :param derived_series: A `DistroSeries` that is assumed to be derived
        from another one.
    :param parent_series: A `DistroSeries` that is a parent of
        `derived_series`.
    :return: A list of newly-created `DistributionJob` ids.
    """
    store = IStore(SourcePackagePublishingHistory)
    spn_ids = store.find(
        SourcePackagePublishingHistory.sourcepackagenameID,
        SourcePackagePublishingHistory.distroseries == derived_series.id,
        SourcePackagePublishingHistory.status.is_in(
            active_publishing_status))
    spn_ids = list(spn_ids)
    if len(spn_ids) == 0:
        return []
    job_ids = Job.createMultiple(store, len(spn_ids))
    return bulk.create(
        (DistributionJob.distribution, DistributionJob.distroseries,
         DistributionJob.job_type, DistributionJob.job_id,
         DistributionJob.metadata),
        [(derived_series.distribution, derived_series,
          DistributionJobType.DISTROSERIESDIFFERENCE, job_id,
          make_metadata(spn_id, parent_series.id))
         for job_id, spn_id in zip(job_ids, spn_ids)],
        get_primary_keys=True)

def query_structural_subscriptions(
        what, bug, bugtasks, level, exclude=None):
    """Query into structural subscriptions for a given bug.

    :param what: The fields to fetch. Choose from `Person`,
        `StructuralSubscription`, `BugSubscriptionFilter`, or a combo.
    :param bug: An `IBug`
    :param bugtasks: An iterable of `IBugTask`.
    :param level: A level from `BugNotificationLevel`. Filters below
        this level will be excluded.
    :param exclude: `Person`s to exclude (e.g. direct subscribers).
    """
    from lp.registry.model.person import Person  # Circular.
    filter_id_query = (
        _get_structural_subscription_filter_id_query(
            bug, bugtasks, level, exclude))
    if not filter_id_query:
        return EmptyResultSet()
    source = IStore(StructuralSubscription).using(
        StructuralSubscription,
        Join(BugSubscriptionFilter,
             BugSubscriptionFilter.structural_subscription_id ==
                 StructuralSubscription.id),
        Join(Person, Person.id == StructuralSubscription.subscriberID))
    conditions = In(BugSubscriptionFilter.id, filter_id_query)
    return source.find(what, conditions)

def test_jobs_with_retry_exceptions_are_queued_again(self):
    # A job that raises a retry error is automatically queued
    # and executed again.
    self.useFixture(
        FeatureFixture(
            {'jobs.celery.enabled_classes': 'TestJobWithRetryError'}))
    with block_on_job(self):
        job = TestJobWithRetryError()
        job.celeryRunOnCommit()
        job_id = job.job_id
        transaction.commit()
        store = IStore(Job)

        # block_on_job() is not aware of the Celery request
        # issued when the retry exception occurs, but we can
        # check the status of the job in the database.
        def job_finished():
            transaction.abort()
            dbjob = store.find(Job, id=job_id)[0]
            return (
                dbjob.status == JobStatus.COMPLETED and
                dbjob.attempt_count == 2)

        count = 0
        while count < 300 and not job_finished():
            # We have a maximum wait of one minute. We should not get
            # anywhere close to that on developer machines (10 seconds
            # was working fine), but when the test suite is run in
            # parallel we may need a lot more time (see bug 1007576).
            sleep(0.2)
            count += 1

    dbjob = store.find(Job, id=job_id)[0]
    self.assertEqual(2, dbjob.attempt_count)
    self.assertEqual(JobStatus.COMPLETED, dbjob.status)

def featured_projects(self):
    """See `IPillarSet`."""
    # Circular imports.
    from lp.registry.model.distribution import Distribution
    from lp.registry.model.product import Product
    from lp.registry.model.projectgroup import ProjectGroup
    store = IStore(PillarName)
    pillar_names = store.find(
        PillarName, PillarName.id == FeaturedProject.pillar_name)

    def preload_pillars(rows):
        pillar_names = (
            set(rows).union(load_related(PillarName, rows, ['alias_for'])))
        pillars = load_related(Product, pillar_names, ['productID'])
        pillars.extend(
            load_related(ProjectGroup, pillar_names, ['projectgroupID']))
        pillars.extend(
            load_related(Distribution, pillar_names, ['distributionID']))
        load_related(LibraryFileAlias, pillars, ['iconID'])

    return list(
        DecoratedResultSet(
            pillar_names, result_decorator=attrgetter('pillar'),
            pre_iter_hook=preload_pillars))

def calculateSourceOverrides(self, archive, distroseries, pocket, spns,
                             source_component=None, include_deleted=False):
    def eager_load(rows):
        bulk.load(Component, (row[1] for row in rows))
        bulk.load(Section, (row[2] for row in rows))

    store = IStore(SourcePackagePublishingHistory)
    already_published = DecoratedResultSet(
        store.find(
            (SourcePackagePublishingHistory.sourcepackagenameID,
             SourcePackagePublishingHistory.componentID,
             SourcePackagePublishingHistory.sectionID),
            SourcePackagePublishingHistory.archiveID == archive.id,
            SourcePackagePublishingHistory.distroseriesID ==
                distroseries.id,
            SourcePackagePublishingHistory.status.is_in(
                self.getExistingPublishingStatuses(include_deleted)),
            SourcePackagePublishingHistory.sourcepackagenameID.is_in(
                spn.id for spn in spns)).order_by(
                    SourcePackagePublishingHistory.sourcepackagenameID,
                    Desc(SourcePackagePublishingHistory.datecreated),
                    Desc(SourcePackagePublishingHistory.id),
            ).config(distinct=(
                SourcePackagePublishingHistory.sourcepackagenameID,)),
        id_resolver((SourcePackageName, Component, Section)),
        pre_iter_hook=eager_load)
    return [
        SourceOverride(name, component, section)
        for (name, component, section) in already_published]

def get_contributions(pofile, potmsgset_ids):
    """Map all users' most recent contributions to a `POFile`.

    Returns a dict mapping `Person` id to the creation time of their
    most recent `TranslationMessage` in `POFile`.

    This leaves some small room for error: a contribution that is masked
    by a diverged entry in this POFile will nevertheless produce a
    POFileTranslator record. Fixing that would complicate the work more
    than it is probably worth.

    :param pofile: The `POFile` to find contributions for.
    :param potmsgset_ids: The ids of the `POTMsgSet`s to look for, as
        returned by `get_potmsgset_ids`.
    """
    store = IStore(pofile)
    language_id = pofile.language.id
    template_id = pofile.potemplate.id
    contribs = store.find(
        (TranslationMessage.submitterID, TranslationMessage.date_created),
        TranslationMessage.potmsgsetID.is_in(potmsgset_ids),
        TranslationMessage.languageID == language_id,
        TranslationMessage.msgstr0 != None,
        Coalesce(TranslationMessage.potemplateID, template_id) ==
            template_id)
    contribs = contribs.config(distinct=(TranslationMessage.submitterID,))
    contribs = contribs.order_by(
        TranslationMessage.submitterID,
        Desc(TranslationMessage.date_created))
    return dict(contribs)

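# A hedged usage sketch combining get_contributions with the
# get_potmsgset_ids helper its docstring names (defined further below);
# `pofile` is an assumed POFile. The result maps each contributor's
# Person id to the date of their newest translation in this file.
potmsgset_ids = get_potmsgset_ids(pofile.potemplate.id)
latest_contribution_by_person = get_contributions(pofile, potmsgset_ids)
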
def getByDerivedAndParentSeries(self, derived_series, parent_series):
    """See `IDistroSeriesParentSet`."""
    store = IStore(DistroSeriesParent)
    return store.find(
        DistroSeriesParent,
        DistroSeriesParent.parent_series_id == parent_series.id,
        DistroSeriesParent.derived_series_id == derived_series.id).one()

def relatedSets(self):
    """See `IPackageset`."""
    store = IStore(Packageset)
    result_set = store.find(
        Packageset,
        Packageset.packagesetgroup == self.packagesetgroup,
        Packageset.id != self.id)
    return _order_result_set(result_set)

def sourcesNotSharedBy(self, other_package_set, direct_inclusion=False):
    """See `IPackageset`."""
    if direct_inclusion == False:
        query = '''
            SELECT pss_this.sourcepackagename
            FROM packagesetsources pss_this,
                 flatpackagesetinclusion fpsi_this
            WHERE pss_this.packageset = fpsi_this.child
                  AND fpsi_this.parent = ?
            EXCEPT
            SELECT pss_other.sourcepackagename
            FROM packagesetsources pss_other,
                 flatpackagesetinclusion fpsi_other
            WHERE pss_other.packageset = fpsi_other.child
                  AND fpsi_other.parent = ?
            '''
    else:
        query = '''
            SELECT pss_this.sourcepackagename
            FROM packagesetsources pss_this
            WHERE pss_this.packageset = ?
            EXCEPT
            SELECT pss_other.sourcepackagename
            FROM packagesetsources pss_other
            WHERE pss_other.packageset = ?
            '''
    store = IStore(Packageset)
    source_names = SQL(query, (self.id, other_package_set.id))
    result_set = store.find(
        SourcePackageName, SourcePackageName.id.is_in(source_names))
    return _order_result_set(result_set)

def getRemoteComponentGroup(self, component_group_name):
    """See `IBugTracker`."""
    component_group = None
    store = IStore(BugTrackerComponentGroup)
    if component_group_name is None:
        return None
    elif component_group_name.isdigit():
        component_group_id = int(component_group_name)
        component_group = store.find(
            BugTrackerComponentGroup,
            BugTrackerComponentGroup.id == component_group_id).one()
    else:
        component_group = store.find(
            BugTrackerComponentGroup,
            BugTrackerComponentGroup.name == component_group_name).one()
    return component_group

def getNewPrivatePPAs(self, since=None):
    """Return the recently created private PPAs."""
    store = IStore(Archive)
    extra_expr = []
    if since:
        extra_expr = [Archive.date_created >= since]
    return store.find(Archive, Archive._private == True, *extra_expr)

def get_potmsgset_ids(potemplate_id):
    """Get the ids for each current `POTMsgSet` in a `POTemplate`."""
    store = IStore(POTemplate)
    return store.find(
        TranslationTemplateItem.potmsgsetID,
        TranslationTemplateItem.potemplateID == potemplate_id,
        TranslationTemplateItem.sequence > 0)

def getByName(self, name, distroseries=None):
    """See `IPackagesetSet`."""
    store = IStore(Packageset)
    if not isinstance(name, unicode):
        name = unicode(name, 'utf-8')

    ubuntu = getUtility(IDistributionSet).getByName(u'ubuntu')
    extra_args = []
    if distroseries is not None:
        # If the user just passed a distro series name, look it up.
        if isinstance(distroseries, basestring):
            try:
                distroseries = ubuntu[distroseries]
            except NotFoundError:
                raise NoSuchPackageSet(distroseries)
        extra_args.append(Packageset.distroseries == distroseries)
    else:
        extra_args.append(Packageset.distroseries == ubuntu.currentseries)

    package_set = store.find(
        Packageset, Packageset.name == name, *extra_args).one()

    if package_set is None:
        raise NoSuchPackageSet(name)

    return package_set

def setsIncludingSource(self, sourcepackagename, distroseries=None,
                        direct_inclusion=False):
    """See `IPackagesetSet`."""
    sourcepackagename = self._nameToSourcePackageName(sourcepackagename)

    if direct_inclusion:
        query = '''
            SELECT pss.packageset FROM packagesetsources pss
            WHERE pss.sourcepackagename = ?
            '''
    else:
        query = '''
            SELECT fpsi.parent
            FROM packagesetsources pss, flatpackagesetinclusion fpsi
            WHERE pss.sourcepackagename = ?
            AND pss.packageset = fpsi.child
            '''
    store = IStore(Packageset)
    psets = SQL(query, (sourcepackagename.id,))
    clauses = [Packageset.id.is_in(psets)]
    if distroseries:
        clauses.append(Packageset.distroseries == distroseries)

    result_set = store.find(Packageset, *clauses)
    return _order_result_set(result_set)

def storeRemoteProductsAndComponents(self, bz_bugtracker, lp_bugtracker):
    """Stores parsed product/component data from bz_bugtracker."""
    components_to_add = []
    for product in bz_bugtracker.products.itervalues():
        # Look up the component group id from Launchpad for the product
        # if it already exists. Otherwise, add it.
        lp_component_group = lp_bugtracker.getRemoteComponentGroup(
            product['name'])
        if lp_component_group is None:
            lp_component_group = lp_bugtracker.addRemoteComponentGroup(
                product['name'])
            if lp_component_group is None:
                self.logger.warning("Failed to add new component group")
                continue
        else:
            for component in lp_component_group.components:
                if (component.name in product['components'] or
                        component.is_visible == False or
                        component.is_custom == True):
                    # We already know something about this component,
                    # or a user has configured it, so ignore it.
                    del product['components'][component.name]
                else:
                    # Component is now missing from Bugzilla,
                    # so drop it here too.
                    store = IStore(BugTrackerComponent)
                    store.find(
                        BugTrackerComponent,
                        BugTrackerComponent.id == component.id,
                        ).remove()

        # The remaining components in the collection will need to be
        # added to launchpad. Record them for now.
        for component in product['components'].values():
            components_to_add.append(
                (component['name'], lp_component_group, True, False))

    if len(components_to_add) > 0:
        self.logger.debug("...Inserting components into database")
        bulk.create(
            (BugTrackerComponent.name,
             BugTrackerComponent.component_group,
             BugTrackerComponent.is_visible,
             BugTrackerComponent.is_custom),
            components_to_add)
        transaction.commit()
        self.logger.debug("...Done")

class RetryDepwaitTunableLoop(TunableLoop):

    # We don't write too much, and it's important that we're timely.
    # Ignore the replication lag and long transaction checks by using a
    # basic LoopTuner.
    tuner_class = LoopTuner

    maximum_chunk_size = 5000

    def __init__(self, log, dry_run, abort_time=None):
        super(RetryDepwaitTunableLoop, self).__init__(log, abort_time)
        self.dry_run = dry_run
        self.start_at = 1
        self.store = IStore(BinaryPackageBuild)

    def findBuilds(self):
        return self.store.find(
            BinaryPackageBuild,
            BinaryPackageBuild.id >= self.start_at,
            BinaryPackageBuild.status == BuildStatus.MANUALDEPWAIT,
            ).order_by(BinaryPackageBuild.id)

    def isDone(self):
        return self.findBuilds().is_empty()

    def __call__(self, chunk_size):
        bpbs = list(self.findBuilds()[:chunk_size])
        sprs = load_related(
            SourcePackageRelease, bpbs, ['source_package_release_id'])
        load_related(SourcePackageName, sprs, ['sourcepackagenameID'])
        chroots = IStore(PocketChroot).find(
            PocketChroot,
            PocketChroot.distroarchseriesID.is_in(
                b.distro_arch_series_id for b in bpbs),
            PocketChroot.chroot != None)
        chroot_series = set(
            chroot.distroarchseriesID for chroot in chroots)
        for build in bpbs:
            if (build.distro_arch_series.distroseries.status ==
                    SeriesStatus.OBSOLETE
                    or not build.can_be_retried
                    or build.distro_arch_series_id not in chroot_series):
                continue
            try:
                build.updateDependencies()
            except UnparsableDependencies as e:
                self.log.error(e)
                continue

            if not build.dependencies:
                self.log.debug('Retrying %s', build.title)
                build.retry()
                build.buildqueue_record.score()

        self.start_at = bpbs[-1].id + 1

        if not self.dry_run:
            transaction.commit()
        else:
            transaction.abort()

def find_team_participations(people, teams=None):
    """Find the teams the given people participate in.

    :param people: The people for which to query team participation.
    :param teams: Optionally, limit the participation check to these
        teams.

    This method performs its work with at most a single database query.
    It first does similar checks to those performed by IPerson.in_team()
    and it may turn out that no database query is required at all.
    """
    teams_to_query = []
    people_teams = {}

    def add_team_to_result(person, team):
        teams = people_teams.get(person)
        if teams is None:
            teams = set()
            people_teams[person] = teams
        teams.add(team)

    # Check for the simple cases - self membership etc.
    if teams:
        for team in teams:
            if team is None:
                continue
            for person in people:
                if team.id == person.id:
                    add_team_to_result(person, team)
                    continue
            if not team.is_team:
                continue
            teams_to_query.append(team)

    # Avoid circular imports
    from lp.registry.model.person import Person

    # We are either checking for membership of any team or didn't
    # eliminate all the specific team participation checks above.
    if teams_to_query or not teams:
        Team = ClassAlias(Person, 'Team')
        person_ids = [person.id for person in people]
        conditions = [
            TeamParticipation.personID == Person.id,
            TeamParticipation.teamID == Team.id,
            Person.id.is_in(person_ids),
            ]
        team_ids = [team.id for team in teams_to_query]
        if team_ids:
            conditions.append(Team.id.is_in(team_ids))
        store = IStore(Person)
        rs = store.find((Person, Team), *conditions)
        for (person, team) in rs:
            add_team_to_result(person, team)

    return people_teams

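# A hedged usage sketch for find_team_participations; `alice`, `bob`,
# `team_x` and `team_y` are assumed Person objects. The result maps each
# person to the set of the given teams they participate in, using at
# most one database query.
people_teams = find_team_participations([alice, bob], teams=[team_x, team_y])
team_x_members = [
    person for person, teams in people_teams.items() if team_x in teams]
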
def getForDifference(distro_series_difference, id):
    """See `IDistroSeriesDifferenceCommentSource`."""
    store = IStore(DistroSeriesDifferenceComment)
    DSDComment = DistroSeriesDifferenceComment
    return store.find(
        DSDComment,
        DSDComment.distro_series_difference == distro_series_difference,
        DSDComment.id == id).one()

def getByBranch(cls, branch):
    """See `ITranslationTemplatesBuildJobSource`."""
    store = IStore(BranchJob)
    branch_job = store.find(BranchJob, BranchJob.branch == branch).one()
    if branch_job is None:
        return None
    else:
        return cls(branch_job)

def get(self, id):
    """See `INameBlacklistSet`."""
    try:
        id = int(id)
    except ValueError:
        return None
    store = IStore(NameBlacklist)
    return store.find(NameBlacklist, NameBlacklist.id == id).one()