def builds(self):
    """See `ILiveFS`."""
    # Most recently active builds first: sort on the later of the
    # start/finish timestamps, pushing never-dispatched builds (NULL
    # dates) to the end; break ties by creation time and then id.
    ordering = (
        NullsLast(Desc(Greatest(
            LiveFSBuild.date_started, LiveFSBuild.date_finished))),
        Desc(LiveFSBuild.date_created),
        Desc(LiveFSBuild.id))
    return self._getBuilds(None, ordering)
def specifications(self, user, sort=None, quantity=None, filter=None, need_people=False, need_branches=False, need_workitems=False): """See IHasSpecifications.""" # need_* is provided only for interface compatibility and # need_*=True is not implemented. if filter is None: filter = set([SpecificationFilter.ACCEPTED]) tables, query = self.spec_filter_clause(user, filter) # import here to avoid circular deps from lp.blueprints.model.specification import Specification results = Store.of(self).using(*tables).find(Specification, *query) if sort == SpecificationSort.DATE: order = (Desc(SprintSpecification.date_created), Specification.id) distinct = [SprintSpecification.date_created, Specification.id] # we need to establish if the listing will show specs that have # been decided only, or will include proposed specs. if (SpecificationFilter.ALL not in filter and SpecificationFilter.PROPOSED not in filter): # this will show only decided specs so use the date the spec # was accepted or declined for the sprint order = (Desc(SprintSpecification.date_decided),) + order distinct = [SprintSpecification.date_decided] + distinct results = results.order_by(*order) else: assert sort is None or sort == SpecificationSort.PRIORITY # fall back to default, which is priority, descending. distinct = True if quantity is not None: results = results[:quantity] return results.config(distinct=distinct)
def completed_builds(self):
    """See `ILiveFS`."""
    # A build counts as completed once its status has left the
    # pending set.
    not_pending = Not(LiveFSBuild.status.is_in(self._pending_states))
    return self._getBuilds(
        not_pending,
        (NullsLast(Desc(Greatest(
            LiveFSBuild.date_started, LiveFSBuild.date_finished))),
         Desc(LiveFSBuild.id)))
def _entries(self):
    # Join the configured origin tables, apply the stored filter
    # clauses, and sort: NULL display names first, then by distribution
    # name, with the newest distro and snappy series at the top.
    rows = IStore(self._table).using(*self._origin).find(
        self._table, *self._clauses)
    return rows.order_by(
        NullsFirst(Distribution.display_name),
        Desc(DistroSeries.date_created),
        Desc(SnappySeries.date_created))
def builds(self):
    """See `ISourcePackageRecipe`."""
    # Most recently started/finished builds first; ties broken by
    # creation time and then id, newest first.
    return self._getBuilds(
        None,
        (Desc(Greatest(SourcePackageRecipeBuild.date_started,
                       SourcePackageRecipeBuild.date_finished)),
         Desc(SourcePackageRecipeBuild.date_created),
         Desc(SourcePackageRecipeBuild.id)))
def completed_builds(self):
    """See `ISourcePackageRecipe`."""
    # Anything that has moved past NEEDSBUILD is treated as completed.
    finished = (
        SourcePackageRecipeBuild.status != BuildStatus.NEEDSBUILD)
    ordering = (
        Desc(Greatest(SourcePackageRecipeBuild.date_started,
                      SourcePackageRecipeBuild.date_finished)),
        Desc(SourcePackageRecipeBuild.id))
    return self._getBuilds(finished, ordering)
def _specification_sort(self, sort):
    """Return the storm sort order for 'specifications'.

    :param sort: As per HasSpecificationsMixin.specifications.
    """
    if sort == SpecificationSort.DATE:
        return (Desc(Specification.datecreated), Specification.id)
    # Default (None) and PRIORITY both sort by priority, descending.
    if sort is None or sort == SpecificationSort.PRIORITY:
        return (Desc(Specification.priority),
                Specification.definition_status,
                Specification.name)
def get_last_change_date(self):
    """Return the date of the newest entry in the backend's log table,
    or None when the backend is unknown or the table is empty.
    """
    if self.backend_is_bugzilla():
        log_table, date_column = DBBugzillaIssuesLog, DBBugzillaIssuesLog.date
    elif self.backend_is_jira():
        log_table, date_column = DBJiraIssuesLog, DBJiraIssuesLog.date
    else:
        return None
    # LIMIT 1 on a descending sort; the loop simply yields the single
    # row if one exists.
    newest = self.store.find(log_table).order_by(Desc(date_column))[:1]
    for entry in newest:
        return entry.date
    return None
def get_top_participants(self, list_name, start, end, limit=None):
    """Return the most active participants between two given dates.

    :param list_name: The name of the mailing list in which this email
        should be searched.
    :param start: A datetime object representing the starting date of
        the interval to query.
    :param end: A datetime object representing the ending date of
        the interval to query.
    :param limit: Limit the number of participants to return. If None or
        not supplied, return them all.
    :returns: A list of (sender_name, sender_email, message_count)
        tuples, sorted by message count, most prolific sender first.
    """
    # Count messages per sender within [start, end).
    number = Alias(Count(Email.sender_email), "number")
    part = self.db.find(
        (Email.sender_name, Email.sender_email, number),
        And(
            Email.list_name == unicode(list_name),
            Email.date >= start,
            Email.date < end,
        ))
    part = part.group_by(Email.sender_email, Email.sender_name)
    part = part.order_by(Desc(number))
    if limit is not None:
        part = part.config(limit=limit)
    return list(part)
def get_thread_neighbors(self, list_name, thread_id):
    """Return the previous and the next threads of the specified thread,
    in date order.

    :param list_name: The name of the mailing list to query.
    :param thread_id: The unique identifier of the thread as specified in
        the database.
    :returns: A couple formed of the older thread and the newer thread,
        in this order.
    :rtype: tuple
    """
    def first_or_none(resultset):
        # Storm result sets raise IndexError on out-of-range access.
        try:
            return resultset[0]
        except IndexError:
            return None

    thread = self.get_thread(list_name, thread_id)
    # Nearest thread active after this one.
    newer = self.db.find(
        Thread,
        And(
            Thread.list_name == unicode(list_name),
            Thread.date_active > thread.date_active,
        )).order_by(Thread.date_active)
    # Nearest thread active before this one.
    older = self.db.find(
        Thread,
        And(
            Thread.list_name == unicode(list_name),
            Thread.date_active < thread.date_active,
        )).order_by(Desc(Thread.date_active))
    return (first_or_none(older), first_or_none(newer))
def register_url(self, url):
    # Assign the next dynamic-ID slot to `url`, wrapping around to 0
    # once MAX_DYN_ID is exceeded, and keep the class-level caches in
    # sync.  Runs as a Twisted inlineCallbacks-style coroutine
    # (yield + returnValue).
    last_id = User._last_dyn_ids_cache.get(self.id)
    if last_id is None:
        # Cache miss: look up the most recently updated DynamicID row
        # to recover the last slot that was written.
        result = yield self.dynamic_ids.find()
        result.order_by(Desc(DynamicID.updated_at), DynamicID.id)
        result.config(limit=1)
        result = yield result.one()
        if result is None:
            # No rows yet; -1 makes the next slot 0.
            last_id = -1
        else:
            last_id = result.id
    next_id = last_id + 1
    if next_id > MAX_DYN_ID:
        # Wrap around and start reusing the oldest slots.
        next_id = 0
    result = yield self.dynamic_ids.find(id=next_id)
    id_obj = yield result.one()
    if id_obj is None:
        id_obj = DynamicID(next_id, url)
        self.dynamic_ids.add(id_obj)
    else:
        # Slot already exists (we wrapped around): repoint it at the
        # new URL instead of inserting a duplicate row.
        id_obj.url = url
    User._last_dyn_ids_cache[self.id] = next_id
    User._ids_cache[self.id][next_id] = url
    returnValue(id_obj)
def get_last_modification_date(self, store, bugs_state, tracker_id):
    """Return the newest `updated_at` stored for the given tracker,
    optionally restricted to open or closed issues, or None when there
    are no matching rows.

    The result is suitable for GitHub's `since` query parameter, e.g.:
    https://api.github.com/repos/composer/composer/issues?page=1&
    state=closed&per_page=100&sort=updated&direction=asc&
    since=2012-05-28T21:11:28Z
    """
    criteria = [
        DBGithubIssueExt.issue_id == DBIssue.id,
        DBIssue.tracker_id == DBTracker.id,
        DBTracker.id == tracker_id,
    ]
    # Narrow by issue state when one of the known states is requested;
    # any other value means "all states".
    if bugs_state == OPEN_STATE:
        criteria.append(DBGithubIssueExt.status == u"open")
    elif bugs_state == CLOSED_STATE:
        criteria.append(DBGithubIssueExt.status == u"closed")
    result = store.find(DBGithubIssueExt, *criteria)
    for entry in result.order_by(Desc(DBGithubIssueExt.updated_at))[:1]:
        return entry.updated_at
    return None
def getForDistroSeries(distroseries, since=None, source_package_name=None): """See `IDistroSeriesDifferenceCommentSource`.""" # Avoid circular imports. from lp.registry.model.distroseriesdifference import ( DistroSeriesDifference, ) store = IStore(DistroSeriesDifferenceComment) DSD = DistroSeriesDifference DSDComment = DistroSeriesDifferenceComment conditions = [ DSDComment.distro_series_difference_id == DSD.id, DSD.derived_series_id == distroseries.id, ] if source_package_name is not None: conditions += [ SourcePackageName.id == DSD.source_package_name_id, SourcePackageName.name == source_package_name, ] if since is not None: older_messages = store.find(Message.id, Message.datecreated < since).order_by( Desc(Message.datecreated)) preceding_message = older_messages.first() if preceding_message is not None: conditions.append(DSDComment.message_id > preceding_message) return store.find(DSDComment, *conditions).order_by(DSDComment.message_id)
def get_last_modification_date(self, store, trk_id):
    """Return the newest `date_last_updated` stored, or None when the
    table is empty.

    NOTE: the query deliberately ignores `trk_id`.  With meta-trackers
    (trackers containing other trackers, as used in OpenStack) the
    per-tracker query always came back empty, forcing imports to start
    from the very beginning, so the date is taken across all trackers.
    This may misbehave when two different Launchpad trackers share one
    database.
    """
    newest = store.find(DBLaunchpadIssueExt).order_by(
        Desc(DBLaunchpadIssueExt.date_last_updated))[:1]
    for entry in newest:
        return entry.date_last_updated
    return None
def pending_builds(self):
    """See `ILiveFS`."""
    # Ordering by id descending is equivalent to ordering by
    # date_created descending (ids increase monotonically) and is
    # less expensive.
    return self._getBuilds(
        LiveFSBuild.status.is_in(self._pending_states),
        Desc(LiveFSBuild.id))
def publishing_history(self):
    """See IDistroArchSeriesBinaryPackage."""
    # Distinct publication records, newest first.
    history = IStore(BinaryPackagePublishingHistory).find(
        BinaryPackagePublishingHistory,
        *self._getPublicationJoins())
    history = history.config(distinct=True)
    return history.order_by(
        Desc(BinaryPackagePublishingHistory.datecreated))
def pending_builds(self):
    """See `ISourcePackageRecipe`."""
    # Ordering by id descending matches ordering by date_created
    # descending (ids increase monotonically) and is less expensive.
    still_pending = (
        SourcePackageRecipeBuild.status == BuildStatus.NEEDSBUILD)
    return self._getBuilds(
        still_pending, Desc(SourcePackageRecipeBuild.id))
def main():
    # Scan MC samples whose cross-section is still the placeholder
    # value 1 and update it, either from a forced command-line value or
    # from a matching dataset already in the database.  Writes are only
    # committed with the -w flag; otherwise this is a dry run.
    options = get_options()
    samples = get_samples(options.regex)
    if samples.count() == 0:
        print("No sample found.")
        return
    for sample in samples:
        # Real data has no cross-section to update.
        if sample.source_dataset.datatype == "data":
            continue
        # Consider a cross-section of one as a non-updated value
        if sample.source_dataset.xsection == 1:
            # Try to find a similar sample in the database, with the same center of mass energy
            print("Updating cross-section of {}".format(
                sample.source_dataset.process))
            if options.force:
                print(" Forcing the cross-section to {}".format(
                    options.force))
                if options.write:
                    sample.source_dataset.xsection = options.force
            else:
                # Candidate datasets: same process name pattern and
                # energy, but a different dataset id.
                possible_matches = dbstore.find(
                    Dataset,
                    Dataset.process.like(sample.source_dataset.process),
                    Dataset.energy == sample.source_dataset.energy,
                    Dataset.dataset_id != sample.source_dataset.dataset_id)
                xsec = None
                if possible_matches.count() == 0:
                    print(" No match for this dataset found.")
                else:
                    # Accept a cross-section only if all candidates
                    # agree on its value.
                    for p in possible_matches.order_by(Desc(
                            Dataset.dataset_id)):
                        if not xsec:
                            xsec = p.xsection
                        else:
                            if xsec != p.xsection:
                                print(
                                    " Warning: more than one possible match found for this dataset, and they do not have the same cross-section. I do not know what to do..."
                                )
                                xsec = None
                                break
                if xsec:
                    print(
                        " Updating with cross-section = {}".format(xsec))
                    if options.write:
                        sample.source_dataset.xsection = xsec
    if options.write:
        dbstore.commit()
    else:
        print(
            "Currently running in dry-run mode. If you are happy with the change, pass the '-w' flag to this script to store the changes into the database."
        )
        dbstore.rollback()
def reportTraceLogSummary(store, limit, endpoint=None):
    """Generator yields the slowest requests from the last 50000 requests.

    @param store: The C{Store} to fetch data from.
    @param limit: Maximum number of requests to yield.
    @param endpoint: Unused; retained for backward compatibility.
    @return: A sequence of C{(duration, endpoint, sessionID)} 3-tuples,
        with the duration converted to a string.
    """
    # First restrict to the most recent 50000 requests.  The original
    # code ordered this window by duration, which made the subselect a
    # no-op (it just re-selected the globally slowest rows) and
    # contradicted the documented behavior; ids increase monotonically,
    # so id order is recency order.
    recent = store.find(TraceLog)
    recent = recent.order_by(Desc(TraceLog.id))
    recent = recent.config(limit=50000)
    subselect = recent.get_select_expr(TraceLog.id)
    # Then pick the slowest `limit` requests within that window.
    slowest = store.find(TraceLog, TraceLog.id.is_in(subselect))
    slowest = slowest.order_by(Desc(TraceLog.duration))
    slowest = slowest.config(limit=limit)
    rows = slowest.values(
        TraceLog.duration, TraceLog.endpoint, TraceLog.sessionID)
    # Use a distinct loop variable so the `endpoint` parameter is not
    # shadowed.
    for duration, log_endpoint, session_id in rows:
        yield str(duration), log_endpoint, session_id
def get_last_modification_date(self, store):
    """Return the newest stored Allura modification date formatted as
    '%Y-%m-%dT%H:%M:%SZ', or None when no rows exist.
    """
    newest = store.find(DBAlluraIssueExt).order_by(
        Desc(DBAlluraIssueExt.mod_date))[:1]
    for entry in newest:
        return entry.mod_date.strftime('%Y-%m-%dT%H:%M:%SZ')
    return None
def last_downloaded(self):
    """See `ILibraryFileAlias`."""
    # Time elapsed since the most recent recorded download day, or
    # None if this file has never been downloaded.
    downloads = Store.of(self).find(
        LibraryFileDownloadCount, libraryfilealias=self)
    downloads.order_by(Desc(LibraryFileDownloadCount.day))
    newest = downloads.first()
    if newest is None:
        return None
    return datetime.now(pytz.utc).date() - newest.day
def _update(cls, distroseries, binarypackagename, archive, log): """Update the package cache for a given IBinaryPackageName 'log' is required, it should be a logger object able to print DEBUG level messages. 'ztm' is the current trasaction manager used for partial commits (in full batches of 100 elements) """ # get the set of published binarypackagereleases bprs = IStore(BinaryPackageRelease).find( BinaryPackageRelease, BinaryPackageRelease.id == BinaryPackagePublishingHistory.binarypackagereleaseID, BinaryPackagePublishingHistory.binarypackagename == binarypackagename, BinaryPackagePublishingHistory.distroarchseriesID == DistroArchSeries.id, DistroArchSeries.distroseries == distroseries, BinaryPackagePublishingHistory.archive == archive, BinaryPackagePublishingHistory.dateremoved == None) bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated)) bprs = bprs.config(distinct=True) if bprs.count() == 0: log.debug("No binary releases found.") return # find or create the cache entry cache = cls.selectOne(""" distroseries = %s AND archive = %s AND binarypackagename = %s """ % sqlvalues(distroseries, archive, binarypackagename)) if cache is None: log.debug("Creating new binary cache entry.") cache = cls(archive=archive, distroseries=distroseries, binarypackagename=binarypackagename) # make sure the cached name, summary and description are correct cache.name = binarypackagename.name cache.summary = bprs[0].summary cache.description = bprs[0].description # get the sets of binary package summaries, descriptions. there is # likely only one, but just in case... summaries = set() descriptions = set() for bpr in bprs: log.debug("Considering binary version %s" % bpr.version) summaries.add(bpr.summary) descriptions.add(bpr.description) # and update the caches cache.summaries = ' '.join(sorted(summaries)) cache.descriptions = ' '.join(sorted(descriptions))
def store_upload_jobs(self):
    """Store-upload jobs for this build, newest first, as derived jobs."""
    job_rows = Store.of(self).find(
        SnapBuildJob,
        SnapBuildJob.snapbuild == self,
        SnapBuildJob.job_type == SnapBuildJobType.STORE_UPLOAD)
    job_rows.order_by(Desc(SnapBuildJob.job_id))

    def preload_jobs(rows):
        # Bulk-load the underlying Job rows to avoid one query per row.
        load_related(Job, rows, ["job_id"])

    return DecoratedResultSet(
        job_rows, lambda job: job.makeDerived(),
        pre_iter_hook=preload_jobs)
def get_last_modification_date(self, store, tracker_id):
    """Return the newest `updated_on` for the given tracker, or None
    when it has no issues.
    """
    issues = store.find(
        DBManiphestIssueExt,
        DBManiphestIssueExt.issue_id == DBIssue.id,
        DBIssue.tracker_id == DBTracker.id,
        DBTracker.id == tracker_id)
    if issues.is_empty():
        return None
    newest = issues.order_by(Desc(DBManiphestIssueExt.updated_on))[0]
    return newest.updated_on
def get_last_modification_date(self, store, tracker_id):
    """Return the newest JIRA update time for the given tracker,
    formatted '%Y-%m-%d %H:%M', or None when it has no issues.
    """
    issues = store.find(
        DBJiraIssueExt,
        DBJiraIssueExt.issue_id == DBIssue.id,
        DBIssue.tracker_id == DBTracker.id,
        DBTracker.id == tracker_id)
    if issues.is_empty():
        return None
    newest = issues.order_by(Desc(DBJiraIssueExt.updated))[0]
    return newest.updated.strftime('%Y-%m-%d %H:%M')
def reportErrorSummary(store):
    """Get a count of errors grouped by exception class and message.

    @param store: The C{Store} to fetch data from.
    @return: A list of C{(count, exception-class, message)} 3-tuples.
        The count is automatically converted to a string.
    """
    occurrences = Alias(Count())
    rows = store.find(
        (occurrences, ErrorLine.exceptionClass, ErrorLine.message))
    rows = rows.group_by(ErrorLine.exceptionClass, ErrorLine.message)
    # Most frequent errors first; alphabetical by class for ties.
    rows = rows.order_by(Desc(occurrences), ErrorLine.exceptionClass)
    return [(str(n), exception_class, message)
            for n, exception_class, message in rows]
def __getitem__(self, version):
    """See IDistroArchSeriesBinaryPackage."""
    # Pick the most recently created publication for this version.
    publication = IStore(BinaryPackagePublishingHistory).find(
        BinaryPackagePublishingHistory,
        BinaryPackageRelease.version == version,
        *self._getPublicationJoins()).order_by(
            Desc(BinaryPackagePublishingHistory.datecreated)).first()
    if publication is None:
        return None
    return DistroArchSeriesBinaryPackageRelease(
        distroarchseries=self.distroarchseries,
        binarypackagerelease=publication.binarypackagerelease)
def get_last_modification_date(self, store, trk_id):
    """Return the newest Gerrit modification date for the given tracker,
    formatted '%Y-%m-%d %H:%M:%S', or None when it has no issues.
    """
    issues = store.find(
        DBGerritIssueExt,
        DBGerritIssueExt.issue_id == DBIssue.id,
        DBIssue.tracker_id == DBTracker.id,
        DBTracker.id == trk_id)
    for entry in issues.order_by(Desc(DBGerritIssueExt.mod_date))[:1]:
        return entry.mod_date.strftime('%Y-%m-%d %H:%M:%S')
    return None
def get_last_modification_date(self, store, tracker_id):
    """Return the newest `mod_date` for the given tracker, or None when
    it has no issues.
    """
    issues = store.find(
        DBReviewBoardIssueExt,
        DBReviewBoardIssueExt.issue_id == DBIssue.id,
        DBIssue.tracker_id == DBTracker.id,
        DBTracker.id == tracker_id)
    if issues.is_empty():
        return None
    newest = issues.order_by(Desc(DBReviewBoardIssueExt.mod_date))[0]
    return newest.mod_date
def getMedianBuildDuration(self):
    """Return the median duration of our successful builds."""
    store = IStore(self)
    rows = store.find(
        (LiveFSBuild.date_started, LiveFSBuild.date_finished),
        LiveFSBuild.livefs == self.livefs_id,
        LiveFSBuild.distro_arch_series == self.distro_arch_series_id,
        LiveFSBuild.status == BuildStatus.FULLYBUILT)
    rows.order_by(Desc(LiveFSBuild.date_finished))
    # Sample at most the nine most recently finished successful builds;
    # for even-sized samples this picks the upper middle element.
    durations = sorted(
        finished - started for started, finished in rows[:9])
    if not durations:
        return None
    return durations[len(durations) // 2]