Example #1
class StormHost(Host):
    __storm_table__ = "host"

    id = Int(primary=True)
    name = RawStr()
    owner_name = Unicode(name="owner")
    owner_email = Unicode()
    password = Unicode()
    ssh_access = Bool()
    fqdn = RawStr()
    platform = Unicode()
    permission = Unicode()
    last_dead_mail = Int()
    join_time = Int()

    def _set_owner(self, value):
        if value is None:
            self.owner_name = None
            self.owner_email = None
        else:
            (self.owner_name, self.owner_email) = value

    def _get_owner(self):
        if self.owner_name is None:
            return None
        else:
            return (self.owner_name, self.owner_email)

    owner = property(_get_owner, _set_owner)
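The owner property above folds the owner_name and owner_email columns into
a single tuple-valued attribute. A minimal sketch of how it behaves,
assuming a configured Storm store (the host name below is hypothetical):

    # `store` is assumed to be a configured storm.store.Store instance.
    host = store.find(StormHost, StormHost.name == "m1").one()

    # Assigning a tuple writes both underlying columns in one step.
    host.owner = (u"Jane Doe", u"jane@example.com")
    assert host.owner_name == u"Jane Doe"
    assert host.owner_email == u"jane@example.com"

    # Assigning None clears both columns, and the getter returns None.
    host.owner = None
    assert host.owner is None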
Example #2
class StormTree(Tree):
    __storm_table__ = "tree"

    id = Int(primary=True)
    name = RawStr()
    scm = RawStr()
    branch = RawStr()
    subdir = RawStr()
    repo = RawStr()
Example #3
class OpaqueValue(Storm):
    """An opaque tag value

    @param fileID: The sha-256 hash of the file.
    @param content: The content of the file.
    """

    __storm_table__ = 'opaque_values'

    fileID = RawStr('file_id', primary=True, allow_none=False)
    content = RawStr('content', allow_none=False)

    def __init__(self, fileID, content):
        self.fileID = fileID
        self.content = content
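Per its docstring, fileID holds the sha-256 hash of the content. A hedged
sketch of constructing one; the use of hashlib and of the raw 32-byte
digest (rather than a hex digest) are assumptions for illustration:

    import hashlib

    content = 'raw file bytes'
    # A raw 32-byte digest fits the RawStr column.
    fileID = hashlib.sha256(content).digest()
    value = OpaqueValue(fileID, content)
    store.add(value)  # `store` is assumed to be a configured Storm store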
Example #4
class StormBuild(Build):
    __storm_table__ = "build"

    id = Int(primary=True)
    tree = RawStr()
    revision = RawStr()
    host = RawStr()
    compiler = RawStr()
    checksum = RawStr()
    upload_time = Int(name="age")
    status_str = RawStr(name="status")
    basename = RawStr()
    host_id = Int()
    tree_id = Int()
    compiler_id = Int()

    def status(self):
        return BuildStatus.__deserialize__(self.status_str)

    def revision_details(self):
        return self.revision

    def log_checksum(self):
        return self.checksum

    def remove(self):
        super(StormBuild, self).remove()
        Store.of(self).remove(self)

    def remove_logs(self):
        super(StormBuild, self).remove_logs()
        self.basename = None
Example #5
class Attachment(Storm):

    __storm_table__ = "attachment"
    __storm_primary__ = "list_name", "message_id", "counter"

    list_name = Unicode()
    message_id = Unicode()
    counter = Int()
    name = Unicode()
    content_type = Unicode()
    encoding = Unicode()
    size = Int()
    content = RawStr()
    # reference to the email
    email = Reference((list_name, message_id),
                      (Email.list_name, Email.message_id))
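Since __storm_primary__ names three columns, an Attachment row is
identified by the (list_name, message_id, counter) triple, and Store.get
takes the key as a tuple. A minimal sketch (the identifiers are
hypothetical, and `store` is assumed to be a configured Storm store):

    key = (u'mylist@example.com', u'<msg-1@example.com>', 1)
    attachment = store.get(Attachment, key)
    if attachment is not None:
        # The two-column Reference resolves to the matching Email row.
        print attachment.email.message_id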
Example #6
class OpaqueValueLink(Storm):
    """A representation of tag_value - opaque_value many-to-one relation.

    @param valueID: The L{TagValue.id}.
    @param fileID: The L{OpaqueValue.fileID}
    """

    __storm_table__ = 'opaque_value_link'
    __storm_primary__ = 'valueID', 'fileID'

    valueID = Int('value_id', allow_none=False)
    fileID = RawStr('file_id', allow_none=False)

    def __init__(self, valueID, fileID):
        self.valueID = valueID
        self.fileID = fileID
Example #7
class EmailFull(Storm):
    """
    The full contents of an archived email, for storage and post-processing
    reasons.
    """
    __storm_table__ = "email_full"
    __storm_primary__ = "list_name", "message_id"

    list_name = Unicode()
    message_id = Unicode()
    full = RawStr()
    email = Reference((list_name, message_id),
                      ("Email.list_name", "Email.message_id"))

    def __init__(self, list_name, message_id, full):
        self.list_name = unicode(list_name)
        self.message_id = unicode(message_id)
        self.full = full
Example #8
class OAuthConsumer(Storm):
    """A L{User} that interacts with Fluidinfo using OAuth.

    @param userID: The L{User.id} associated with the key and secret.
    @param secret: The C{str} consumer secret.
    """

    __storm_table__ = 'oauth_consumers'

    userID = Int('user_id', primary=True, allow_none=False)
    secret = RawStr('secret', allow_none=False)
    creationTime = DateTime('creation_time', default=AutoReload)

    user = Reference(userID, 'User.id')

    def __init__(self, userID, secret):
        self.userID = userID
        self.secret = secret
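Note that creationTime defaults to AutoReload: the column is left to the
database default on insert, and Storm fetches the stored value back when
the attribute is next read. A sketch, assuming a configured store and a
hypothetical user id:

    consumer = OAuthConsumer(userID=42, secret='s3cr3t')
    store.add(consumer)
    store.flush()
    # Reading the attribute reloads the database-assigned timestamp.
    print consumer.creationTime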
Example #9
class User(Storm):
    """A user of Fluidinfo.

    @param username: The username of the user.
    @param passwordHash: The hashed password of the user.
    @param fullname: The name of the user.
    @param email: The email address for the user.
    @param role: The L{Role} for the user.
    """

    __storm_table__ = 'users'

    id = Int('id', primary=True, allow_none=False, default=AutoReload)
    objectID = UUID('object_id', allow_none=False)
    role = ConstantEnum('role', enum_class=Role, allow_none=False)
    username = Unicode('username', allow_none=False)
    passwordHash = RawStr('password_hash', allow_none=False)
    fullname = Unicode('fullname', allow_none=False)
    email = Unicode('email', validator=validateEmail)
    namespaceID = Int('namespace_id')
    creationTime = DateTime('creation_time', default=AutoReload)

    namespace = Reference(namespaceID, 'Namespace.id')

    def __init__(self, username, passwordHash, fullname, email, role):
        self.objectID = uuid4()
        self.username = username
        self.passwordHash = passwordHash
        self.fullname = fullname
        self.email = email
        self.role = role

    def isAnonymous(self):
        """Returns C{True} if this user has the anonymous role."""
        return self.role == Role.ANONYMOUS

    def isSuperuser(self):
        """Returns C{True} if this user has the super user role."""
        return self.role == Role.SUPERUSER

    def isUser(self):
        """Returns C{True} if this user has the regular user role."""
        return self.role == Role.USER
Example #10
class Person(object):
    __storm_table__ = 'person'
    id = Int(primary=True)
    name = RawStr()
Example #11
class Address(object):
    __storm_table__ = 'address'
    id = Int(primary=True)
    address = RawStr()
    person_id = Int()
    person = Reference(person_id, Person.id)
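Person and Address are the classic Storm tutorial pair: plain classes
become database-backed once __storm_table__ and the column properties are
declared. A minimal end-to-end sketch, assuming an in-memory SQLite
database (BLOB columns are used because the properties here are RawStr,
i.e. byte strings):

    from storm.locals import create_database, Store

    database = create_database('sqlite:')
    store = Store(database)
    store.execute("CREATE TABLE person "
                  "(id INTEGER PRIMARY KEY, name BLOB)")
    store.execute("CREATE TABLE address "
                  "(id INTEGER PRIMARY KEY, address BLOB, person_id INTEGER)")

    person = Person()
    store.add(person)
    person.name = 'John Doe'
    address = Address()
    store.add(address)
    address.address = '123 Example Street'
    address.person = person   # fills in address.person_id via the Reference
    store.flush()
    print address.person.name  # -> 'John Doe'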
Example #12
class DistroSeriesPackageCache(SQLBase):
    _table = 'DistroSeriesPackageCache'

    archive = ForeignKey(dbName='archive', foreignKey='Archive', notNull=True)
    distroseries = ForeignKey(dbName='distroseries',
                              foreignKey='DistroSeries',
                              notNull=True)
    binarypackagename = ForeignKey(dbName='binarypackagename',
                                   foreignKey='BinaryPackageName',
                                   notNull=True)

    fti = RawStr(allow_none=True, default=None)
    name = StringCol(notNull=False, default=None)
    summary = StringCol(notNull=False, default=None)
    description = StringCol(notNull=False, default=None)
    summaries = StringCol(notNull=False, default=None)
    descriptions = StringCol(notNull=False, default=None)

    @classmethod
    def findCurrentBinaryPackageNames(cls, archive, distroseries):
        bpn_ids = IStore(BinaryPackagePublishingHistory).find(
            BinaryPackagePublishingHistory.binarypackagenameID,
            BinaryPackagePublishingHistory.distroarchseriesID.is_in(
                Select(DistroArchSeries.id,
                       tables=[DistroArchSeries],
                       where=DistroArchSeries.distroseries == distroseries)),
            BinaryPackagePublishingHistory.archive == archive,
            BinaryPackagePublishingHistory.status.is_in(
                (PackagePublishingStatus.PENDING,
                 PackagePublishingStatus.PUBLISHED))).config(distinct=True)
        return bulk.load(BinaryPackageName, bpn_ids)

    @classmethod
    def _find(cls, distroseries, archive=None):
        """All of the cached binary package records for this distroseries.

        If 'archive' is not given it will return all caches stored for the
        distroseries main archives (PRIMARY and PARTNER).
        """
        if archive is not None:
            archives = [archive.id]
        else:
            archives = distroseries.distribution.all_distro_archive_ids

        return IStore(cls).find(cls, cls.distroseries == distroseries,
                                cls.archiveID.is_in(archives)).order_by(
                                    cls.name)

    @classmethod
    def removeOld(cls, distroseries, archive, log):
        """Delete any records that are no longer applicable.

        Consider all binarypackages marked as REMOVED.

        Also purges all existing cache records for disabled archives.

        :param archive: target `IArchive`.
        :param log: the context logger object able to print DEBUG level
            messages.
        """
        # get the set of package names that should be there
        if not archive.enabled:
            bpns = set()
        else:
            bpns = set(cls.findCurrentBinaryPackageNames(
                archive, distroseries))

        # remove the cache entries for binary packages we no longer want
        for cache in cls._find(distroseries, archive):
            if cache.binarypackagename not in bpns:
                log.debug("Removing binary cache for '%s' (%s)" %
                          (cache.name, cache.id))
                cache.destroySelf()

    @classmethod
    def _update(cls, distroseries, binarypackagenames, archive, log):
        """Update the package cache for a given set of `IBinaryPackageName`s.

        'log' is required; it should be a logger object able to print
        DEBUG level messages.
        'ztm' is the current transaction manager used for partial commits
        (in full batches of 100 elements).
        """
        # get the set of published binarypackagereleases
        all_details = list(
            IStore(BinaryPackageRelease).find(
                (BinaryPackageRelease.binarypackagenameID,
                 BinaryPackageRelease.summary,
                 BinaryPackageRelease.description,
                 Max(BinaryPackageRelease.datecreated)),
                BinaryPackageRelease.id ==
                BinaryPackagePublishingHistory.binarypackagereleaseID,
                BinaryPackagePublishingHistory.binarypackagenameID.is_in(
                    [bpn.id for bpn in binarypackagenames]),
                BinaryPackagePublishingHistory.distroarchseriesID.is_in(
                    Select(
                        DistroArchSeries.id,
                        tables=[DistroArchSeries],
                        where=DistroArchSeries.distroseries == distroseries)),
                BinaryPackagePublishingHistory.archive == archive,
                BinaryPackagePublishingHistory.status.is_in(
                    (PackagePublishingStatus.PENDING,
                     PackagePublishingStatus.PUBLISHED))).group_by(
                         BinaryPackageRelease.binarypackagenameID,
                         BinaryPackageRelease.summary,
                         BinaryPackageRelease.description).order_by(
                             BinaryPackageRelease.binarypackagenameID,
                             Desc(Max(BinaryPackageRelease.datecreated))))
        if not all_details:
            log.debug("No binary releases found.")
            return

        details_map = defaultdict(list)
        for (bpn_id, summary, description, datecreated) in all_details:
            bpn = IStore(BinaryPackageName).get(BinaryPackageName, bpn_id)
            details_map[bpn].append((summary, description))

        all_caches = IStore(cls).find(
            cls, cls.distroseries == distroseries, cls.archive == archive,
            cls.binarypackagenameID.is_in(
                [bpn.id for bpn in binarypackagenames]))
        cache_map = {cache.binarypackagename: cache for cache in all_caches}

        for bpn in set(binarypackagenames) - set(cache_map):
            cache_map[bpn] = cls(archive=archive,
                                 distroseries=distroseries,
                                 binarypackagename=bpn)

        for bpn in binarypackagenames:
            cache = cache_map[bpn]
            details = details_map[bpn]
            # make sure the cached name, summary and description are correct
            cache.name = bpn.name
            cache.summary = details[0][0]
            cache.description = details[0][1]

            # get the sets of binary package summaries, descriptions. there is
            # likely only one, but just in case...

            summaries = set()
            descriptions = set()
            for summary, description in details:
                summaries.add(summary)
                descriptions.add(description)

            # and update the caches
            cache.summaries = ' '.join(sorted(summaries))
            cache.descriptions = ' '.join(sorted(descriptions))

    @classmethod
    def updateAll(cls, distroseries, archive, log, ztm, commit_chunk=500):
        """Update the binary package cache

        Consider all binary package names published in this distro series
        and entirely skips updates for disabled archives

        :param archive: target `IArchive`;
        :param log: logger object for printing debug level information;
        :param ztm:  transaction used for partial commits, every chunk of
            'commit_chunk' updates is committed;
        :param commit_chunk: number of updates before commit, defaults to 500.

        :return: the number of packages updated.
        """
        # Do not create cache entries for disabled archives.
        if not archive.enabled:
            return

        # Get the set of package names to deal with.
        bpns = list(
            sorted(cls.findCurrentBinaryPackageNames(archive, distroseries),
                   key=attrgetter('name')))

        number_of_updates = 0
        chunks = []
        chunk = []
        for bpn in bpns:
            chunk.append(bpn)
            if len(chunk) == commit_chunk:
                chunks.append(chunk)
                chunk = []
        if chunk:
            chunks.append(chunk)
        for chunk in chunks:
            bulk.load(BinaryPackageName, [bpn.id for bpn in chunk])
            log.debug("Considering binaries %s",
                      ', '.join([bpn.name for bpn in chunk]))
            cls._update(distroseries, chunk, archive, log)
            number_of_updates += len(chunk)
            log.debug("Committing")
            ztm.commit()

        return number_of_updates
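The loop at the end of updateAll is a plain batching pattern: accumulate
names into lists of at most commit_chunk elements, then load, update and
commit one batch at a time. The same chunking logic in isolation:

    def batch(items, size):
        """Split items into lists of at most `size` elements, mirroring
        the commit_chunk batching in updateAll above."""
        chunks = []
        chunk = []
        for item in items:
            chunk.append(item)
            if len(chunk) == size:
                chunks.append(chunk)
                chunk = []
        if chunk:
            chunks.append(chunk)
        return chunks

    assert batch(range(5), 2) == [[0, 1], [2, 3], [4]]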
Example #13
class DistroSeriesPackageCache(SQLBase):
    implements(IDistroSeriesPackageCache)
    _table = 'DistroSeriesPackageCache'

    archive = ForeignKey(dbName='archive', foreignKey='Archive', notNull=True)
    distroseries = ForeignKey(dbName='distroseries',
                              foreignKey='DistroSeries',
                              notNull=True)
    binarypackagename = ForeignKey(dbName='binarypackagename',
                                   foreignKey='BinaryPackageName',
                                   notNull=True)

    fti = RawStr(allow_none=True, default=None)
    name = StringCol(notNull=False, default=None)
    summary = StringCol(notNull=False, default=None)
    description = StringCol(notNull=False, default=None)
    summaries = StringCol(notNull=False, default=None)
    descriptions = StringCol(notNull=False, default=None)

    @classmethod
    def _find(cls, distroseries, archive=None):
        """All of the cached binary package records for this distroseries.

        If 'archive' is not given it will return all caches stored for the
        distroseries main archives (PRIMARY and PARTNER).
        """
        if archive is not None:
            archives = [archive.id]
        else:
            archives = distroseries.distribution.all_distro_archive_ids

        return IStore(cls).find(cls, cls.distroseries == distroseries,
                                cls.archiveID.is_in(archives)).order_by(
                                    cls.name)

    @classmethod
    def removeOld(cls, distroseries, archive, log):
        """Delete any records that are no longer applicable.

        Consider all binarypackages marked as REMOVED.

        Also purges all existing cache records for disabled archives.

        :param archive: target `IArchive`.
        :param log: the context logger object able to print DEBUG level
            messages.
        """
        # get the set of package names that should be there
        bpns = set(
            BinaryPackageName.select("""
            BinaryPackagePublishingHistory.distroarchseries =
                DistroArchSeries.id AND
            DistroArchSeries.distroseries = %s AND
            Archive.id = %s AND
            BinaryPackagePublishingHistory.archive = Archive.id AND
            BinaryPackagePublishingHistory.binarypackagerelease =
                BinaryPackageRelease.id AND
            BinaryPackagePublishingHistory.binarypackagename =
                BinaryPackageName.id AND
            BinaryPackagePublishingHistory.dateremoved is NULL AND
            Archive.enabled = TRUE
            """ % sqlvalues(distroseries.id, archive.id),
                                     distinct=True,
                                     clauseTables=[
                                         'Archive', 'DistroArchSeries',
                                         'BinaryPackagePublishingHistory',
                                         'BinaryPackageRelease'
                                     ]))

        # remove the cache entries for binary packages we no longer want
        for cache in cls._find(distroseries, archive):
            if cache.binarypackagename not in bpns:
                log.debug("Removing binary cache for '%s' (%s)" %
                          (cache.name, cache.id))
                cache.destroySelf()

    @classmethod
    def _update(cls, distroseries, binarypackagename, archive, log):
        """Update the package cache for a given IBinaryPackageName

        'log' is required; it should be a logger object able to print
        DEBUG level messages.
        'ztm' is the current transaction manager used for partial commits
        (in full batches of 100 elements).
        """
        # get the set of published binarypackagereleases
        bprs = IStore(BinaryPackageRelease).find(
            BinaryPackageRelease, BinaryPackageRelease.id ==
            BinaryPackagePublishingHistory.binarypackagereleaseID,
            BinaryPackagePublishingHistory.binarypackagename ==
            binarypackagename,
            BinaryPackagePublishingHistory.distroarchseriesID ==
            DistroArchSeries.id, DistroArchSeries.distroseries == distroseries,
            BinaryPackagePublishingHistory.archive == archive,
            BinaryPackagePublishingHistory.dateremoved == None)
        bprs = bprs.order_by(Desc(BinaryPackageRelease.datecreated))
        bprs = bprs.config(distinct=True)

        if bprs.count() == 0:
            log.debug("No binary releases found.")
            return

        # find or create the cache entry
        cache = cls.selectOne("""
            distroseries = %s AND
            archive = %s AND
            binarypackagename = %s
            """ % sqlvalues(distroseries, archive, binarypackagename))
        if cache is None:
            log.debug("Creating new binary cache entry.")
            cache = cls(archive=archive,
                        distroseries=distroseries,
                        binarypackagename=binarypackagename)

        # make sure the cached name, summary and description are correct
        cache.name = binarypackagename.name
        cache.summary = bprs[0].summary
        cache.description = bprs[0].description

        # get the sets of binary package summaries, descriptions. there is
        # likely only one, but just in case...

        summaries = set()
        descriptions = set()
        for bpr in bprs:
            log.debug("Considering binary version %s" % bpr.version)
            summaries.add(bpr.summary)
            descriptions.add(bpr.description)

        # and update the caches
        cache.summaries = ' '.join(sorted(summaries))
        cache.descriptions = ' '.join(sorted(descriptions))

    @classmethod
    def updateAll(cls, distroseries, archive, log, ztm, commit_chunk=500):
        """Update the binary package cache

        Consider all binary package names published in this distro series
        and entirely skips updates for disabled archives

        :param archive: target `IArchive`;
        :param log: logger object for printing debug level information;
        :param ztm:  transaction used for partial commits, every chunk of
            'commit_chunk' updates is committed;
        :param commit_chunk: number of updates before commit, defaults to 500.

        :return: the number of packages updated.
        """
        # Do not create cache entries for disabled archives.
        if not archive.enabled:
            return

        # Get the set of package names to deal with.
        bpns = IStore(BinaryPackageName).find(
            BinaryPackageName, DistroArchSeries.distroseries == distroseries,
            BinaryPackagePublishingHistory.distroarchseriesID ==
            DistroArchSeries.id,
            BinaryPackagePublishingHistory.archive == archive,
            BinaryPackagePublishingHistory.binarypackagename ==
            BinaryPackageName.id,
            BinaryPackagePublishingHistory.dateremoved == None).config(
                distinct=True).order_by(BinaryPackageName.name)

        number_of_updates = 0
        chunk_size = 0
        for bpn in bpns:
            log.debug("Considering binary '%s'" % bpn.name)
            cls._update(distroseries, bpn, archive, log)
            number_of_updates += 1
            chunk_size += 1
            if chunk_size == commit_chunk:
                chunk_size = 0
                log.debug("Committing")
                ztm.commit()

        return number_of_updates
Example #14
class StormTest(Test):
    __storm_table__ = "test"

    id = Int(primary=True)
    name = RawStr()