class SourcePackageRecipeBuild(SpecificBuildFarmJobSourceMixin,
                               PackageBuildMixin, Storm):

    __storm_table__ = 'SourcePackageRecipeBuild'

    job_type = BuildFarmJobType.RECIPEBRANCHBUILD

    id = Int(primary=True)

    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id)

    @property
    def binary_builds(self):
        """See `ISourcePackageRecipeBuild`."""
        return Store.of(self).find(
            BinaryPackageBuild, BinaryPackageBuild.source_package_release ==
            SourcePackageRelease.id,
            SourcePackageRelease.source_package_recipe_build == self.id)

    @property
    def current_component(self):
        # Only PPAs have a sane default component at the moment, but we
        # only support recipes for PPAs.
        component = self.archive.default_component
        assert component is not None
        return component

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    distroseries_id = Int(name='distroseries', allow_none=True)
    distroseries = Reference(distroseries_id, 'DistroSeries.id')
    distro_series = distroseries

    pocket = DBEnum(name='pocket',
                    enum=PackagePublishingPocket,
                    allow_none=False)

    @property
    def distribution(self):
        """See `IPackageBuild`."""
        return self.distroseries.distribution

    recipe_id = Int(name='recipe')
    recipe = Reference(recipe_id, 'SourcePackageRecipe.id')

    requester_id = Int(name='requester', allow_none=False)
    requester = Reference(requester_id, 'Person.id')

    upload_log_id = Int(name='upload_log')
    upload_log = Reference(upload_log_id, 'LibraryFileAlias.id')

    dependencies = Unicode(name='dependencies')

    processor_id = Int(name='processor')
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    date_created = DateTime(name='date_created',
                            tzinfo=pytz.UTC,
                            allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(name='date_first_dispatched',
                                     tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    failure_count = Int(name='failure_count', allow_none=False)

    manifest = Reference(
        id,
        'SourcePackageRecipeData.sourcepackage_recipe_build_id',
        on_remote=True)

    def setManifestText(self, text):
        if text is None:
            if self.manifest is not None:
                IStore(self.manifest).remove(self.manifest)
        elif self.manifest is None:
            getUtility(ISourcePackageRecipeDataSource).createManifestFromText(
                text, self)
        else:
            parsed, recipe_branch_type = (
                getUtility(IRecipeBranchSource).getParsedRecipe(text))
            self.manifest.setRecipe(parsed, recipe_branch_type)

    def getManifestText(self):
        if self.manifest is None:
            return None
        return str(self.manifest.getRecipe())

    @property
    def source_package_release(self):
        """See `ISourcePackageRecipeBuild`."""
        return Store.of(self).find(SourcePackageRelease,
                                   source_package_recipe_build=self).one()

    @property
    def title(self):
        if self.recipe is None:
            branch_name = 'deleted'
        else:
            branch_name = self.recipe.base.unique_name
        return '%s recipe build in %s %s' % (
            branch_name, self.distribution.name, self.distroseries.name)

    def __init__(self, build_farm_job, distroseries, recipe, requester,
                 archive, pocket, date_created):
        """Construct a SourcePackageRecipeBuild."""
        processor = distroseries.nominatedarchindep.processor
        super(SourcePackageRecipeBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.distroseries = distroseries
        self.recipe = recipe
        self.requester = requester
        self.archive = archive
        self.pocket = pocket
        self.status = BuildStatus.NEEDSBUILD
        self.processor = processor
        self.virtualized = True
        if date_created is not None:
            self.date_created = date_created

    @classmethod
    def new(cls,
            distroseries,
            recipe,
            requester,
            archive,
            pocket=None,
            date_created=None,
            duration=None):
        """See `ISourcePackageRecipeBuildSource`."""
        store = IMasterStore(SourcePackageRecipeBuild)
        if pocket is None:
            pocket = PackagePublishingPocket.RELEASE
        if date_created is None:
            date_created = UTC_NOW
        build_farm_job = getUtility(IBuildFarmJobSource).new(
            cls.job_type, BuildStatus.NEEDSBUILD, date_created, None, archive)
        spbuild = cls(build_farm_job, distroseries, recipe, requester, archive,
                      pocket, date_created)
        store.add(spbuild)
        return spbuild

    @staticmethod
    def makeDailyBuilds(logger=None):
        from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
        recipes = SourcePackageRecipe.findStaleDailyBuilds()
        if logger is None:
            logger = logging.getLogger()
        builds = []
        for recipe in recipes:
            recipe.is_stale = False
            logger.debug('Recipe %s/%s is stale', recipe.owner.name,
                         recipe.name)
            if recipe.daily_build_archive is None:
                logger.debug(' - No daily build archive specified.')
                continue
            for distroseries in recipe.distroseries:
                series_name = distroseries.named_version
                try:
                    build = recipe.requestBuild(
                        recipe.daily_build_archive, recipe.owner, distroseries,
                        PackagePublishingPocket.RELEASE)
                except BuildAlreadyPending:
                    logger.debug(' - build already pending for %s',
                                 series_name)
                    continue
                except CannotUploadToArchive as e:
                    # This will catch all PPA-related issues:
                    # disabled, security, wrong pocket, etc.
                    logger.debug(' - daily build failed for %s: %s',
                                 series_name, repr(e))
                except BuildNotAllowedForDistro:
                    logger.debug(' - cannot build against %s.' % series_name)
                except ProgrammingError:
                    raise
                except Exception:
                    logger.exception(' - problem with %s', series_name)
                else:
                    logger.debug(' - build requested for %s', series_name)
                    builds.append(build)
        return builds

    @property
    def can_be_rescored(self):
        """See `IBuild`."""
        return self.status is BuildStatus.NEEDSBUILD

    @property
    def can_be_cancelled(self):
        """See `ISourcePackageRecipeBuild`."""
        if not self.buildqueue_record:
            return False

        cancellable_statuses = [
            BuildStatus.BUILDING,
            BuildStatus.NEEDSBUILD,
        ]
        return self.status in cancellable_statuses

    def cancel(self):
        """See `ISourcePackageRecipeBuild`."""
        if not self.can_be_cancelled:
            return
        # BuildQueue.cancel() will decide whether to go straight to
        # CANCELLED, or go through CANCELLING to let buildd-manager
        # clean up the slave.
        self.buildqueue_record.cancel()

    def destroySelf(self):
        if self.buildqueue_record is not None:
            self.buildqueue_record.destroySelf()
        store = Store.of(self)
        releases = store.find(
            SourcePackageRelease,
            SourcePackageRelease.source_package_recipe_build == self.id)
        for release in releases:
            release.source_package_recipe_build = None
        store.remove(self)
        store.remove(self.build_farm_job)

    def calculateScore(self):
        return 2510 + self.archive.relative_build_score

    @classmethod
    def getByID(cls, build_id):
        """See `ISourcePackageRecipeBuildSource`."""
        store = IMasterStore(SourcePackageRecipeBuild)
        return store.find(cls, cls.id == build_id).one()

    @classmethod
    def getByBuildFarmJob(cls, build_farm_job):
        """See `ISpecificBuildFarmJobSource`."""
        return Store.of(build_farm_job).find(
            cls, build_farm_job_id=build_farm_job.id).one()

    @classmethod
    def preloadBuildsData(cls, builds):
        # Circular imports.
        from lp.code.model.sourcepackagerecipe import SourcePackageRecipe
        from lp.registry.model.distribution import Distribution
        from lp.registry.model.distroseries import DistroSeries
        from lp.services.librarian.model import LibraryFileAlias
        load_related(LibraryFileAlias, builds, ['log_id'])
        archives = load_related(Archive, builds, ['archive_id'])
        load_related(Person, archives, ['ownerID'])
        distroseries = load_related(DistroSeries, builds, ['distroseries_id'])
        load_related(Distribution, distroseries, ['distributionID'])
        sprs = load_related(SourcePackageRecipe, builds, ['recipe_id'])
        SourcePackageRecipe.preLoadDataForSourcePackageRecipes(sprs)

    @classmethod
    def getByBuildFarmJobs(cls, build_farm_jobs):
        """See `ISpecificBuildFarmJobSource`."""
        if len(build_farm_jobs) == 0:
            return EmptyResultSet()
        rows = Store.of(build_farm_jobs[0]).find(
            cls,
            cls.build_farm_job_id.is_in(bfj.id for bfj in build_farm_jobs))
        return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)

    def estimateDuration(self):
        """See `IPackageBuild`."""
        median = self.recipe.getMedianBuildDuration()
        if median is not None:
            return median
        return timedelta(minutes=10)

    def verifySuccessfulUpload(self):
        return self.source_package_release is not None

    def notify(self, extra_info=None):
        """See `IPackageBuild`."""
        # If our recipe has been deleted, any notification will fail.
        if self.recipe is None:
            return
        if self.status == BuildStatus.FULLYBUILT:
            # Don't send mail for successful recipe builds; it can be just
            # too much.
            return
        mailer = SourcePackageRecipeBuildMailer.forStatus(self)
        mailer.sendAll()

    def lfaUrl(self, lfa):
        """Return the URL for a LibraryFileAlias, in the context of self.
        """
        if lfa is None:
            return None
        return ProxiedLibraryFileAlias(lfa, self).http_url

    @property
    def log_url(self):
        """See `IPackageBuild`.

        Overridden here so that it uses the SourcePackageRecipeBuild as
        context.
        """
        return self.lfaUrl(self.log)

    @property
    def upload_log_url(self):
        """See `IPackageBuild`.

        Overridden here so that it uses the SourcePackageRecipeBuild as
        context.
        """
        return self.lfaUrl(self.upload_log)

    def getFileByName(self, filename):
        """See `ISourcePackageRecipeBuild`."""
        files = dict((lfa.filename, lfa)
                     for lfa in [self.log, self.upload_log] if lfa is not None)
        try:
            return files[filename]
        except KeyError:
            raise NotFoundError(filename)

    def getUploader(self, changes):
        """See `IPackageBuild`."""
        return self.requester
Example 2
class Context(Model):
    """
    This model keeps track of specific contexts settings
    """
    __storm_table__ = 'context'

    # unique_fields is a dict keyed by a unique field ID; each value is
    # another dict containing the field's descriptive values:
    # "presentation_order" : int
    # "preview" : bool
    # "required" : bool
    # "type" : unicode
    # "options" : dict (optional!)
    unique_fields = Pickle()

    # localized_fields is a dict with the same keys as unique_fields;
    # each value is a dict containing:
    # 'name' : unicode
    # 'hint' : unicode
    localized_fields = Pickle()
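
    # Purely illustrative (hypothetical) contents for the two pickled dicts
    # above, based only on the descriptions in the comments; the real keys
    # are whatever field IDs the application generates:
    #
    #   unique_fields = {
    #       'field-id-1': {
    #           'presentation_order': 1,
    #           'preview': True,
    #           'required': False,
    #           'type': u'text',
    #           'options': {},  # optional
    #       },
    #   }
    #   localized_fields = {
    #       'field-id-1': {'name': u'Subject', 'hint': u'A short summary'},
    #   }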

    selectable_receiver = Bool()
    escalation_threshold = Int()

    tip_max_access = Int()
    file_max_download = Int()
    file_required = Bool()
    tip_timetolive = Int()
    submission_timetolive = Int()
    last_update = DateTime()
    tags = Pickle()

    # localized stuff
    name = Pickle(validator=shortlocal_v)
    description = Pickle(validator=longlocal_v)
    receiver_introduction = Pickle(validator=longlocal_v)
    fields_introduction = Pickle(validator=longlocal_v)

    #receivers = ReferenceSet(
    #                         Context.id,
    #                         ReceiverContext.context_id,
    #                         ReceiverContext.receiver_id,
    #                         Receiver.id)

    select_all_receivers = Bool()
    postpone_superpower = Bool()
    can_delete_submission = Bool()

    maximum_selectable_receivers = Int()
    require_file_description = Bool()
    delete_consensus_percentage = Int()
    require_pgp = Bool()
    show_small_cards = Bool()

    presentation_order = Int()

    unicode_keys = []
    localized_strings = [
        'name', 'description', 'receiver_introduction', 'fields_introduction'
    ]
    int_keys = [
        'escalation_threshold', 'tip_max_access', 'file_max_download',
        'maximum_selectable_receivers', 'delete_consensus_percentage',
        'presentation_order'
    ]
    bool_keys = [
        'selectable_receiver', 'file_required', 'select_all_receivers',
        'postpone_superpower', 'can_delete_submission',
        'require_file_description', 'require_pgp', 'show_small_cards'
    ]
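
A minimal sketch, assumed rather than taken from the original project, of how key lists such as Context.unicode_keys, int_keys, bool_keys and localized_strings above might be consumed to copy request values onto a model instance; the helper name and coercion rules are illustrative only:

def update_model_from_dict(model, values, language=u'en'):
    """Copy recognised keys from `values` onto `model`, coercing types."""
    for key in getattr(model, 'unicode_keys', []):
        if key in values:
            setattr(model, key, unicode(values[key]))
    for key in getattr(model, 'int_keys', []):
        if key in values:
            setattr(model, key, int(values[key]))
    for key in getattr(model, 'bool_keys', []):
        if key in values:
            setattr(model, key, bool(values[key]))
    # Localized values are assumed to be stored as {language_code: text}.
    for key in getattr(model, 'localized_strings', []):
        if key in values:
            localized = getattr(model, key) or {}
            localized[language] = values[key]
            setattr(model, key, localized)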
Example 3
class TranslationTemplatesBuild(SpecificBuildFarmJobSourceMixin,
                                BuildFarmJobMixin, Storm):
    """A `BuildFarmJob` extension for translation templates builds."""

    __storm_table__ = 'TranslationTemplatesBuild'

    job_type = BuildFarmJobType.TRANSLATIONTEMPLATESBUILD

    id = Int(name='id', primary=True)
    build_farm_job_id = Int(name='build_farm_job', allow_none=False)
    build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
    branch_id = Int(name='branch', allow_none=False)
    branch = Reference(branch_id, 'Branch.id')

    processor_id = Int(name='processor')
    processor = Reference(processor_id, 'Processor.id')
    virtualized = Bool(name='virtualized')

    date_created = DateTime(name='date_created',
                            tzinfo=pytz.UTC,
                            allow_none=False)
    date_started = DateTime(name='date_started', tzinfo=pytz.UTC)
    date_finished = DateTime(name='date_finished', tzinfo=pytz.UTC)
    date_first_dispatched = DateTime(name='date_first_dispatched',
                                     tzinfo=pytz.UTC)

    builder_id = Int(name='builder')
    builder = Reference(builder_id, 'Builder.id')

    status = DBEnum(name='status', enum=BuildStatus, allow_none=False)

    log_id = Int(name='log')
    log = Reference(log_id, 'LibraryFileAlias.id')

    failure_count = Int(name='failure_count', allow_none=False)

    @property
    def title(self):
        return u'Translation template build for %s' % (self.branch.displayname)

    def __init__(self, build_farm_job, branch, processor):
        super(TranslationTemplatesBuild, self).__init__()
        self.build_farm_job = build_farm_job
        self.branch = branch
        self.status = BuildStatus.NEEDSBUILD
        self.processor = processor
        self.virtualized = True

    def estimateDuration(self):
        """See `IBuildFarmJob`."""
        return timedelta(seconds=10)

    @classmethod
    def _getStore(cls, store=None):
        """Return `store` if given, or the default."""
        if store is None:
            return IStore(cls)
        else:
            return store

    @classmethod
    def _getBuildArch(cls):
        """Returns an `IProcessor` to queue a translation build for."""
        # XXX Danilo Segan bug=580429: we hard-code processor to the Ubuntu
        # default processor architecture.  This stops the buildfarm from
        # accidentally dispatching the jobs to private builders.
        ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
        return ubuntu.currentseries.nominatedarchindep.processor

    @classmethod
    def _hasPotteryCompatibleSetup(cls, branch):
        """Does `branch` look as if pottery can generate templates for it?

        :param branch: A `Branch` object.
        """
        bzr_branch = removeSecurityProxy(branch).getBzrBranch()
        return is_intltool_structure(bzr_branch.basis_tree())

    @classmethod
    def generatesTemplates(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        logger = logging.getLogger('translation-templates-build')
        if branch.private:
            # We don't support generating templates from private branches
            # at the moment.
            logger.debug("Branch %s is private.", branch.unique_name)
            return False

        utility = getUtility(IRosettaUploadJobSource)
        if not utility.providesTranslationFiles(branch):
            # Nobody asked for templates generated from this branch.
            logger.debug("No templates requested for branch %s.",
                         branch.unique_name)
            return False

        if not cls._hasPotteryCompatibleSetup(branch):
            # Nothing we could do with this branch if we wanted to.
            logger.debug("Branch %s is not pottery-compatible.",
                         branch.unique_name)
            return False

        # Yay!  We made it.
        return True

    @classmethod
    def create(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        processor = cls._getBuildArch()
        build_farm_job = getUtility(IBuildFarmJobSource).new(
            BuildFarmJobType.TRANSLATIONTEMPLATESBUILD)
        build = TranslationTemplatesBuild(build_farm_job, branch, processor)
        store = cls._getStore()
        store.add(build)
        store.flush()
        return build

    @classmethod
    def scheduleTranslationTemplatesBuild(cls, branch):
        """See `ITranslationTemplatesBuildSource`."""
        logger = logging.getLogger('translation-templates-build')
        if not config.rosetta.generate_templates:
            # This feature is disabled by default.
            logger.debug("Template generation is disabled.")
            return

        try:
            if cls.generatesTemplates(branch):
                # This branch is used for generating templates.
                logger.info("Requesting templates build for branch %s.",
                            branch.unique_name)
                cls.create(branch).queueBuild()
        except Exception as e:
            logger.error(e)
            raise

    @classmethod
    def getByID(cls, build_id, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        return store.get(TranslationTemplatesBuild, build_id)

    @classmethod
    def getByBuildFarmJob(cls, buildfarmjob, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        match = store.find(TranslationTemplatesBuild,
                           build_farm_job_id=buildfarmjob.id)
        return match.one()

    @classmethod
    def getByBuildFarmJobs(cls, buildfarmjobs, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        rows = store.find(
            TranslationTemplatesBuild,
            TranslationTemplatesBuild.build_farm_job_id.is_in(
                bfj.id for bfj in buildfarmjobs))
        return DecoratedResultSet(rows, pre_iter_hook=cls.preloadBuildsData)

    @classmethod
    def preloadBuildsData(cls, builds):
        # Circular imports.
        from lp.services.librarian.model import LibraryFileAlias
        # Load the related branches.
        branches = load_related(Branch, builds, ['branch_id'])
        # Preload branches' cached associated targets, product series, and
        # suite source packages for all the related branches.
        GenericBranchCollection.preloadDataForBranches(branches)
        load_related(LibraryFileAlias, builds, ['log_id'])

    @classmethod
    def findByBranch(cls, branch, store=None):
        """See `ITranslationTemplatesBuildSource`."""
        store = cls._getStore(store)
        return store.find(TranslationTemplatesBuild,
                          TranslationTemplatesBuild.branch == branch)

    @property
    def log_url(self):
        """See `IBuildFarmJob`."""
        if self.log is None:
            return None
        return self.log.http_url

    def calculateScore(self):
        """See `IBuildFarmJob`."""
        # Hard-code score for now.  Most PPA jobs start out at 2510;
        # TranslationTemplateBuild are fast so we want them at a higher
        # priority.
        return HARDCODED_TRANSLATIONTEMPLATESBUILD_SCORE
Example 4
class Job(SQLBase):
    """See `IJob`."""

    @property
    def job_id(self):
        return self.id

    scheduled_start = UtcDateTimeCol()

    date_created = UtcDateTimeCol()

    date_started = UtcDateTimeCol()

    date_finished = UtcDateTimeCol()

    lease_expires = UtcDateTimeCol()

    log = StringCol()

    _status = EnumCol(
        enum=JobStatus, notNull=True, default=JobStatus.WAITING,
        dbName='status')

    attempt_count = Int(default=0)

    max_retries = Int(default=0)

    requester_id = Int(name='requester', allow_none=True)
    requester = Reference(requester_id, 'Person.id')

    base_json_data = JSON(name='json_data')

    base_job_type = EnumCol(enum=JobType, dbName='job_type')

    # Mapping of valid target states from a given state.
    _valid_transitions = {
        JobStatus.WAITING:
            (JobStatus.RUNNING,
             JobStatus.SUSPENDED),
        JobStatus.RUNNING:
            (JobStatus.COMPLETED,
             JobStatus.FAILED,
             JobStatus.SUSPENDED,
             JobStatus.WAITING),
        JobStatus.FAILED: (JobStatus.WAITING,),
        JobStatus.COMPLETED: (JobStatus.WAITING,),
        JobStatus.SUSPENDED:
            (JobStatus.WAITING,),
        }

    # Set of all states where the job could eventually complete.
    PENDING_STATUSES = frozenset(
        (JobStatus.WAITING,
         JobStatus.RUNNING,
         JobStatus.SUSPENDED))

    def _set_status(self, status):
        if status not in self._valid_transitions[self._status]:
            raise InvalidTransition(self._status, status)
        self._status = status

    status = property(lambda x: x._status)

    @property
    def is_pending(self):
        """See `IJob`."""
        return self.status in self.PENDING_STATUSES

    @property
    def is_runnable(self):
        """See `IJob`."""
        if self.status != JobStatus.WAITING:
            return False
        if self.scheduled_start is None:
            return True
        return self.scheduled_start <= datetime.datetime.now(UTC)

    @classmethod
    def createMultiple(cls, store, num_jobs, requester=None):
        """Create multiple `Job`s at once.

        :param store: `Store` to create the jobs in.
        :param num_jobs: Number of `Job`s to create.
        :param requester: The `IPerson` requesting the jobs.
        :return: An iterable of `Job.id` values for the new jobs.
        """
        return bulk.create(
                (Job._status, Job.requester),
                [(JobStatus.WAITING, requester) for i in range(num_jobs)],
                get_primary_keys=True)

    def acquireLease(self, duration=300):
        """See `IJob`."""
        if (self.lease_expires is not None
            and self.lease_expires >= datetime.datetime.now(UTC)):
            raise LeaseHeld
        expiry = datetime.datetime.fromtimestamp(time.time() + duration,
            UTC)
        self.lease_expires = expiry

    def getTimeout(self):
        """Return the number of seconds until the job should time out.

        Jobs timeout when their leases expire.  If the lease for this job has
        already expired, return 0.
        """
        expiry = timegm(self.lease_expires.timetuple())
        return max(0, expiry - time.time())

    def start(self, manage_transaction=False):
        """See `IJob`."""
        self._set_status(JobStatus.RUNNING)
        self.date_started = datetime.datetime.now(UTC)
        self.date_finished = None
        self.attempt_count += 1
        if manage_transaction:
            transaction.commit()

    def complete(self, manage_transaction=False):
        """See `IJob`."""
        # Commit the transaction to update the DB time.
        if manage_transaction:
            transaction.commit()
        self._set_status(JobStatus.COMPLETED)
        self.date_finished = datetime.datetime.now(UTC)
        if manage_transaction:
            transaction.commit()

    def fail(self, manage_transaction=False):
        """See `IJob`."""
        if manage_transaction:
            transaction.abort()
        self._set_status(JobStatus.FAILED)
        self.date_finished = datetime.datetime.now(UTC)
        if manage_transaction:
            transaction.commit()

    def queue(self, manage_transaction=False, abort_transaction=False,
              add_commit_hook=None):
        """See `IJob`."""
        if manage_transaction:
            if abort_transaction:
                transaction.abort()
            # Commit the transaction to update the DB time.
            transaction.commit()
        if self.status != JobStatus.WAITING:
            self._set_status(JobStatus.WAITING)
        self.date_finished = datetime.datetime.now(UTC)
        # Release the lease to allow short retry delays to be effective.
        self.lease_expires = None
        if add_commit_hook is not None:
            add_commit_hook()
        if manage_transaction:
            transaction.commit()

    def suspend(self, manage_transaction=False):
        """See `IJob`."""
        self._set_status(JobStatus.SUSPENDED)
        if manage_transaction:
            transaction.commit()

    def resume(self):
        """See `IJob`."""
        if self.status is not JobStatus.SUSPENDED:
            raise InvalidTransition(self._status, JobStatus.WAITING)
        self._set_status(JobStatus.WAITING)
        self.lease_expires = None
Example 5
class FieldOption_v_20(Model):
    __storm_table__ = 'fieldoption'
    field_id = Unicode()
    number = Int()
    attrs = JSON()
Example 6
class Test(object):
    __storm_table__ = "test"
    bar = Int(primary=True)

    def __init__(self, bar):
        self.bar = bar
Example 7
class Revision(SQLBase):
    """See IRevision."""

    date_created = UtcDateTimeCol(notNull=True, default=DEFAULT)
    log_body = StringCol(notNull=True)
    gpgkey = ForeignKey(dbName='gpgkey', foreignKey='GPGKey', default=None)

    revision_author_id = Int(name='revision_author', allow_none=False)
    revision_author = Reference(revision_author_id, 'RevisionAuthor.id')

    revision_id = StringCol(notNull=True,
                            alternateID=True,
                            alternateMethodName='byRevisionID')
    revision_date = UtcDateTimeCol(notNull=False)

    karma_allocated = BoolCol(default=False, notNull=True)

    properties = SQLMultipleJoin('RevisionProperty', joinColumn='revision')

    @property
    def parents(self):
        """See IRevision.parents"""
        return shortlist(
            RevisionParent.selectBy(revision=self, orderBy='sequence'))

    @property
    def parent_ids(self):
        """Sequence of globally unique ids for the parents of this revision.

        The corresponding Revision objects can be retrieved, if they are
        present in the database, using the RevisionSet Zope utility.
        """
        return [parent.parent_id for parent in self.parents]

    def getLefthandParent(self):
        if len(self.parent_ids) == 0:
            parent_id = NULL_REVISION
        else:
            parent_id = self.parent_ids[0]
        return RevisionSet().getByRevisionId(parent_id)

    def getProperties(self):
        """See `IRevision`."""
        return dict((prop.name, prop.value) for prop in self.properties)

    def allocateKarma(self, branch):
        """See `IRevision`."""
        # Always set karma_allocated to True so that Lp does not reprocess
        # junk and invalid user branches because they do not get karma.
        self.karma_allocated = True
        # If we know who the revision author is, give them karma.
        author = self.revision_author.person
        if author is not None and branch is not None:
            # Backdate the karma to the time the revision was created.  If the
            # revision_date on the revision is in future (for whatever weird
            # reason) we will use the date_created from the revision (which
            # will be now) as the karma date created.  Having future karma
            # events is both wrong, as the revision has been created (and it
            # is lying), and a problem with the way the Launchpad code
            # currently does its karma degradation over time.
            karma_date = min(self.revision_date, self.date_created)
            karma = branch.target.assignKarma(author, 'revisionadded',
                                              karma_date)
            return karma
        else:
            return None

    def getBranch(self, allow_private=False, allow_junk=True):
        """See `IRevision`."""
        from lp.code.model.branch import Branch
        from lp.code.model.branchrevision import BranchRevision

        store = Store.of(self)

        query = And(self.id == BranchRevision.revision_id,
                    BranchRevision.branch_id == Branch.id)
        if not allow_private:
            query = And(
                query, Branch.information_type.is_in(PUBLIC_INFORMATION_TYPES))
        if not allow_junk:
            query = And(
                query,
                # Not-junk branches are either associated with a product
                # or with a source package.
                Or((Branch.product != None),
                   And(Branch.sourcepackagename != None,
                       Branch.distroseries != None)))
        result_set = store.find(Branch, query)
        if self.revision_author.person is None:
            result_set.order_by(Asc(BranchRevision.sequence))
        else:
            result_set.order_by(
                Branch.ownerID != self.revision_author.personID,
                Asc(BranchRevision.sequence))

        return result_set.first()
Example 8
class FieldAnswerGroup_v_29(ModelWithID):
    __storm_table__ = 'fieldanswergroup'
    number = Int(default=0)
    fieldanswer_id = Unicode()
Example 9
class Node(Model):
    """
    This table represents the system-wide settings.
    """
    version = Unicode(default=unicode(__version__))
    version_db = Unicode(default=unicode(DATABASE_VERSION))

    name = Unicode(validator=shorttext_v, default=u'')

    public_site = Unicode(validator=shorttext_v, default=u'')
    hidden_service = Unicode(validator=shorttext_v, default=u'')

    receipt_salt = Unicode(validator=shorttext_v)

    languages_enabled = JSON(default=LANGUAGES_SUPPORTED_CODES)
    default_language = Unicode(validator=shorttext_v, default=u'en')
    default_timezone = Int(default=0)

    description = JSON(validator=longlocal_v, default=empty_localization)
    presentation = JSON(validator=longlocal_v, default=empty_localization)
    footer = JSON(validator=longlocal_v, default=empty_localization)
    security_awareness_title = JSON(validator=longlocal_v, default=empty_localization)
    security_awareness_text = JSON(validator=longlocal_v, default=empty_localization)
    context_selector_label = JSON(validator=longlocal_v, default=empty_localization)

    # Advanced settings
    maximum_namesize = Int(default=128)
    maximum_textsize = Int(default=4096)
    maximum_filesize = Int(default=30)
    tor2web_admin = Bool(default=True)
    tor2web_custodian = Bool(default=True)
    tor2web_whistleblower = Bool(default=False)
    tor2web_receiver = Bool(default=True)
    tor2web_unauth = Bool(default=True)
    allow_unencrypted = Bool(default=False)
    disable_encryption_warnings = Bool(default=False)
    allow_iframes_inclusion = Bool(default=False)
    submission_minimum_delay = Int(default=10)
    submission_maximum_ttl = Int(default=10800)

    # privileges of receivers
    can_postpone_expiration = Bool(default=False)
    can_delete_submission = Bool(default=False)
    can_grant_permissions = Bool(default=False)

    ahmia = Bool(default=False)
    wizard_done = Bool(default=False)

    disable_submissions = Bool(default=False)
    disable_privacy_badge = Bool(default=False)
    disable_security_awareness_badge = Bool(default=False)
    disable_security_awareness_questions = Bool(default=False)
    disable_key_code_hint = Bool(default=False)
    disable_donation_panel = Bool(default=False)

    enable_captcha = Bool(default=True)
    enable_proof_of_work = Bool(default=True)

    enable_experimental_features = Bool(default=False)

    whistleblowing_question = JSON(validator=longlocal_v, default=empty_localization)
    whistleblowing_button = JSON(validator=longlocal_v, default=empty_localization)

    simplified_login = Bool(default=True)

    enable_custom_privacy_badge = Bool(default=False)
    custom_privacy_badge_tor = JSON(validator=longlocal_v, default=empty_localization)
    custom_privacy_badge_none = JSON(validator=longlocal_v, default=empty_localization)

    header_title_homepage = JSON(validator=longlocal_v, default=empty_localization)
    header_title_submissionpage = JSON(validator=longlocal_v, default=empty_localization)
    header_title_receiptpage = JSON(validator=longlocal_v, default=empty_localization)
    header_title_tippage = JSON(validator=longlocal_v, default=empty_localization)

    widget_comments_title = JSON(validator=shortlocal_v, default=empty_localization)
    widget_messages_title = JSON(validator=shortlocal_v, default=empty_localization)
    widget_files_title = JSON(validator=shortlocal_v, default=empty_localization)

    landing_page = Unicode(default=u'homepage')

    show_contexts_in_alphabetical_order = Bool(default=False)

    threshold_free_disk_megabytes_high = Int(default=200)
    threshold_free_disk_megabytes_medium = Int(default=500)
    threshold_free_disk_megabytes_low = Int(default=1000)

    threshold_free_disk_percentage_high = Int(default=3)
    threshold_free_disk_percentage_medium = Int(default=5)
    threshold_free_disk_percentage_low = Int(default=10)

    unicode_keys = [
        'name',
        'public_site',
        'hidden_service',
        'default_language',
        'landing_page'
    ]

    int_keys = [
        'maximum_namesize',
        'maximum_textsize',
        'maximum_filesize',
        'default_timezone',
        'submission_minimum_delay',
        'submission_maximum_ttl',
        'threshold_free_disk_megabytes_high',
        'threshold_free_disk_megabytes_medium',
        'threshold_free_disk_megabytes_low',
        'threshold_free_disk_percentage_high',
        'threshold_free_disk_percentage_medium',
        'threshold_free_disk_percentage_low'
    ]

    bool_keys = ['tor2web_admin', 'tor2web_receiver', 'tor2web_whistleblower',
                 'tor2web_custodian', 'tor2web_unauth',
                 'can_postpone_expiration', 'can_delete_submission', 'can_grant_permissions',
                 'ahmia',
                 'allow_unencrypted',
                 'disable_encryption_warnings',
                 'simplified_login',
                 'show_contexts_in_alphabetical_order',
                 'allow_iframes_inclusion',
                 'disable_submissions',
                 'disable_privacy_badge', 'disable_security_awareness_badge',
                 'disable_security_awareness_questions', 'enable_custom_privacy_badge',
                 'disable_key_code_hint',
                 'disable_donation_panel',
                 'enable_captcha',
                 'enable_proof_of_work',
                 'enable_experimental_features']

    # wizard_done is not checked because it's set by the backend

    localized_keys = [
        'description',
        'presentation',
        'footer',
        'security_awareness_title',
        'security_awareness_text',
        'whistleblowing_question',
        'whistleblowing_button',
        'custom_privacy_badge_tor',
        'custom_privacy_badge_none',
        'header_title_homepage',
        'header_title_submissionpage',
        'header_title_receiptpage',
        'header_title_tippage',
        'context_selector_label',
        'widget_comments_title',
        'widget_messages_title',
        'widget_files_title'
    ]
Example 10
class StructuralSubscription(Storm):
    """A subscription to a Launchpad structure."""

    implements(IStructuralSubscription)

    __storm_table__ = 'StructuralSubscription'

    id = Int(primary=True)

    productID = Int("product", default=None)
    product = Reference(productID, "Product.id")

    productseriesID = Int("productseries", default=None)
    productseries = Reference(productseriesID, "ProductSeries.id")

    projectID = Int("project", default=None)
    project = Reference(projectID, "ProjectGroup.id")

    milestoneID = Int("milestone", default=None)
    milestone = Reference(milestoneID, "Milestone.id")

    distributionID = Int("distribution", default=None)
    distribution = Reference(distributionID, "Distribution.id")

    distroseriesID = Int("distroseries", default=None)
    distroseries = Reference(distroseriesID, "DistroSeries.id")

    sourcepackagenameID = Int("sourcepackagename", default=None)
    sourcepackagename = Reference(sourcepackagenameID, "SourcePackageName.id")

    subscriberID = Int("subscriber", allow_none=False,
                        validator=validate_person)
    subscriber = Reference(subscriberID, "Person.id")

    subscribed_byID = Int("subscribed_by", allow_none=False,
                          validator=validate_public_person)
    subscribed_by = Reference(subscribed_byID, "Person.id")

    date_created = DateTime(
        "date_created", allow_none=False, default=UTC_NOW,
        tzinfo=pytz.UTC)
    date_last_updated = DateTime(
        "date_last_updated", allow_none=False, default=UTC_NOW,
        tzinfo=pytz.UTC)

    def __init__(self, subscriber, subscribed_by, **kwargs):
        self.subscriber = subscriber
        self.subscribed_by = subscribed_by
        for arg, value in kwargs.iteritems():
            setattr(self, arg, value)

    @property
    def target(self):
        """See `IStructuralSubscription`."""
        if self.product is not None:
            return self.product
        elif self.productseries is not None:
            return self.productseries
        elif self.project is not None:
            return self.project
        elif self.milestone is not None:
            return self.milestone
        elif self.distribution is not None:
            if self.sourcepackagename is not None:
                # Circular imports.
                from lp.registry.model.distributionsourcepackage import (
                    DistributionSourcePackage)
                return DistributionSourcePackage(
                    self.distribution, self.sourcepackagename)
            else:
                return self.distribution
        elif self.distroseries is not None:
            return self.distroseries
        else:
            raise AssertionError('StructuralSubscription has no target.')

    @property
    def bug_filters(self):
        """See `IStructuralSubscription`."""
        return IStore(BugSubscriptionFilter).find(
            BugSubscriptionFilter,
            BugSubscriptionFilter.structural_subscription == self)

    def newBugFilter(self):
        """See `IStructuralSubscription`."""
        bug_filter = BugSubscriptionFilter()
        bug_filter.structural_subscription = self
        # This flush is needed for the web service API.
        IStore(StructuralSubscription).flush()
        return bug_filter

    def delete(self):
        BugSubscriptionFilter.deleteMultiple(
            [bf.id for bf in self.bug_filters])
        Store.of(self).remove(self)
Example 11
class POFileStatsJob(StormBase, BaseRunnableJob):
    """The details for a POFile status update job."""

    __storm_table__ = 'POFileStatsJob'

    config = config.IPOFileStatsJobSource

    # Instances of this class are runnable jobs.
    implements(IRunnableJob)

    # Oddly, BaseRunnableJob inherits from BaseRunnableJobSource so this class
    # is both the factory for jobs (the "implements", above) and the source
    # for runnable jobs (not the constructor of the job source, the class
    # provides the IJobSource interface itself).
    classProvides(IPOFileStatsJobSource)

    # The Job table contains core job details.
    job_id = Int('job', primary=True)
    job = Reference(job_id, Job.id)

    # This is the POFile which needs its statistics updated.
    pofile_id = Int('pofile')
    pofile = Reference(pofile_id, POFile.id)

    def __init__(self, pofile):
        self.job = Job()
        self.pofile = pofile
        super(POFileStatsJob, self).__init__()

    def getOperationDescription(self):
        """See `IRunnableJob`."""
        return 'updating POFile statistics'

    def run(self):
        """See `IRunnableJob`."""
        logger = logging.getLogger()
        logger.info('Updating statistics for %s' % self.pofile.title)
        self.pofile.updateStatistics()

        # Next we have to find any POFiles that share translations with the
        # above POFile so we can update their statistics too.  To do that we
        # first have to find the set of shared templates.
        subset = getUtility(IPOTemplateSet).getSharingSubset(
            product=self.pofile.potemplate.product,
            distribution=self.pofile.potemplate.distribution,
            sourcepackagename=self.pofile.potemplate.sourcepackagename)
        shared_templates = subset.getSharingPOTemplates(
            self.pofile.potemplate.name)
        # Now we have to find any POFiles that translate the shared templates
        # into the same language as the POFile this job is about.
        for template in shared_templates:
            pofile = template.getPOFileByLang(self.pofile.language.code)
            if pofile is None:
                continue
            pofile.updateStatistics()

    @staticmethod
    def iterReady():
        """See `IJobSource`."""
        return IStore(POFileStatsJob).find((POFileStatsJob),
            And(POFileStatsJob.job == Job.id,
                Job.id.is_in(Job.ready_jobs)))

    def makeDerived(self):
        """Support UniversalJobSource.

        (Most Job ORM classes are generic, because their database table is
        used for several related job types.  Therefore, they have derived
        classes to implement the specific Job.

        POFileStatsJob implements the specific job, so its makeDerived returns
        itself.)
        """
        return self

    def getDBClass(self):
        return self.__class__
Example 12
class FieldOption_v_22(Model):
    __storm_table__ = 'fieldoption'
    field_id = Unicode()
    presentation_order = Int()
    attrs = JSON()
Example 13
class DBIssue(object):
    """
    Maps elements from X{issues} table.

    @param issue: identifier of the issue
    @type issue: C{str}
    @param tracker_id: identifier of the tracker
    @type tracker_id: C{int}

    @ivar __storm_table__: Name of the database table.
    @type __storm_table__: C{str}

    @ivar id: Database issue identifier.
    @type id: L{storm.locals.Int}
    @ivar issue: Issue identifier. 
    @type issue: L{storm.locals.Unicode}
    @ivar type: Type of the issue.
    @type type: L{storm.locals.Unicode}
    @ivar summary: Summary of the issue.
    @type summary: L{storm.locals.Unicode}
    @ivar description: Description of the issue.
    @type description: L{storm.locals.Unicode}
    @ivar status: Status of the issue.
    @type status: L{storm.locals.Unicode}
    @ivar resolution: Resolution of the issue.
    @type resolution: L{storm.locals.Unicode}
    @ivar priority: Priority of the issue.
    @type priority: L{storm.locals.Unicode}
    @ivar submitted_by: Identifier of the user that submitted the issue.
    @type submitted_by: L{storm.locals.Int}
    @ivar submitted_on: Date when the issue was submitted
    @type submitted_on: L{storm.locals.DateTime}
    @ivar assigned_to: Identifier of the user assigned to this issue.
    @type assigned_to: L{storm.locals.Int}
    @ivar tracker_id: Tracker identifier.
    @type tracker_id: L{storm.locals.Int}
    @ivar tracker: Reference to L{DBTracker} object.
    @type tracker: L{storm.locals.Reference}
    @ivar submitted: Reference to L{DBPeople} object.
    @type submitted: L{storm.locals.Reference}
    @ivar assigned: Reference to L{DBPeople} object.
    @type assigned: L{storm.locals.Reference}
    """
    __storm_table__ = 'issues'

    id = Int(primary=True)
    issue = Unicode()
    type = Unicode()
    summary = Unicode()
    description = Unicode()
    status = Unicode()
    resolution = Unicode()
    priority = Unicode()
    submitted_by = Int()
    submitted_on = DateTime()
    assigned_to = Int()
    tracker_id = Int()

    tracker = Reference(tracker_id, DBTracker.id)
    submitted = Reference(submitted_by, DBPeople.id)
    assigned = Reference(assigned_to, DBPeople.id)

    def __init__(self, issue, tracker_id):
        self.issue = unicode(issue)
        self.tracker_id = tracker_id
Example 14
class DistributionSourcePackageInDatabase(Storm):
    """Temporary class to allow access to the database."""

    # XXX: allenap 2008-11-13 bug=297736: This is a temporary measure
    # while DistributionSourcePackage is not yet hooked into the
    # database but we need access to some of the fields in the
    # database.

    __storm_table__ = 'DistributionSourcePackage'

    id = Int(primary=True)

    distribution_id = Int(name='distribution')
    distribution = Reference(distribution_id, 'Distribution.id')

    sourcepackagename_id = Int(name='sourcepackagename')
    sourcepackagename = Reference(sourcepackagename_id, 'SourcePackageName.id')

    bug_reporting_guidelines = Unicode()
    bug_reported_acknowledgement = Unicode()

    bug_count = Int()
    po_message_count = Int()
    is_upstream_link_allowed = Bool()
    enable_bugfiling_duplicate_search = Bool()

    @property
    def currentrelease(self):
        """See `IDistributionSourcePackage`."""
        releases = self.distribution.getCurrentSourceReleases(
            [self.sourcepackagename])
        return releases.get(self)

    # This is a per-thread LRU cache of mappings from (distribution_id,
    # sourcepackagename_id)) to dsp_id. See get() for how this cache helps to
    # avoid database hits without causing consistency issues.
    _cache = ThreadLocalLRUCache(1000, 700)
    # Synchronize the mapping cache with transactions. The mapping is not
    # especially useful after a transaction completes because Storm invalidates
    # its caches, and leaving the mapping cache in place causes difficult to
    # understand test interactions.
    transaction.manager.registerSynch(_cache)

    @classmethod
    def get(cls, distribution, sourcepackagename):
        """Get a DSP given distribution and source package name.

        Attempts to use a cached `(distro_id, spn_id) --> dsp_id` mapping to
        avoid hitting the database.
        """
        # Check for a cached mapping from (distro_id, spn_id) to dsp_id.
        dsp_cache_key = distribution.id, sourcepackagename.id
        dsp_id = cls._cache.get(dsp_cache_key)
        # If not, fetch from the database.
        if dsp_id is None:
            return cls.getDirect(distribution, sourcepackagename)
        # Try store.get(), allowing Storm to answer from cache if it can.
        store = Store.of(distribution)
        dsp = store.get(DistributionSourcePackageInDatabase, dsp_id)
        # If it's not found, query the database; the mapping might be stale.
        if dsp is None:
            return cls.getDirect(distribution, sourcepackagename)
        # Check that the mapping in the cache was correct.
        if distribution.id != dsp.distribution_id:
            return cls.getDirect(distribution, sourcepackagename)
        if sourcepackagename.id != dsp.sourcepackagename_id:
            return cls.getDirect(distribution, sourcepackagename)
        # Cache hit, phew.
        return dsp

    @classmethod
    def getDirect(cls, distribution, sourcepackagename):
        """Get a DSP given distribution and source package name.

        Caches the `(distro_id, spn_id) --> dsp_id` mapping, but does not
        otherwise use the cache; it always goes to the database.
        """
        dsp = Store.of(distribution).find(
            DistributionSourcePackageInDatabase,
            DistributionSourcePackageInDatabase.sourcepackagename ==
            sourcepackagename, DistributionSourcePackageInDatabase.distribution
            == distribution).one()
        dsp_cache_key = distribution.id, sourcepackagename.id
        if dsp is None:
            pass  # No way to eject things from the cache!
        else:
            cls._cache[dsp_cache_key] = dsp.id
        return dsp

    @classmethod
    def new(cls,
            distribution,
            sourcepackagename,
            is_upstream_link_allowed=False):
        """Create a new DSP with the given parameters.

        Caches the `(distro_id, spn_id) --> dsp_id` mapping.
        """
        dsp = DistributionSourcePackageInDatabase()
        dsp.distribution = distribution
        dsp.sourcepackagename = sourcepackagename
        dsp.is_upstream_link_allowed = is_upstream_link_allowed
        Store.of(distribution).add(dsp)
        Store.of(distribution).flush()
        dsp_cache_key = distribution.id, sourcepackagename.id
        cls._cache[dsp_cache_key] = dsp.id
        return dsp
Example 15
class Person(object):
    __storm_table__ = 'person'
    id = Int(primary=True)
    name = Unicode()
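
A minimal usage sketch (not from the original page) showing how a tiny Storm model such as Person above can be stored and queried against an in-memory SQLite database; the table creation statement and sample name are assumptions for illustration:

from storm.locals import create_database, Store

database = create_database('sqlite:')
store = Store(database)
store.execute('CREATE TABLE person (id INTEGER PRIMARY KEY, name VARCHAR)')

person = Person()
person.name = u'Sample Person'
store.add(person)
store.flush()  # the INTEGER PRIMARY KEY is assigned here

found = store.find(Person, Person.name == u'Sample Person').one()
assert found is person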
Example 16
class Notification(Model):
    """
    This table has only one instance and contains all the notification
    information for the node. Templates are imported in the handler, but
    settings are expected all at once.
    """
    server = Unicode(validator=shorttext_v, default=u'demo.globaleaks.org')
    port = Int(default=9267)

    username = Unicode(validator=shorttext_v, default=u'hey_you_should_change_me')
    password = Unicode(validator=shorttext_v, default=u'yes_you_really_should_change_me')

    source_name = Unicode(validator=shorttext_v, default=u'GlobaLeaks - CHANGE EMAIL ACCOUNT USED FOR NOTIFICATION')
    source_email = Unicode(validator=shorttext_v, default=u'*****@*****.**')

    security = Unicode(validator=shorttext_v, default=u'TLS')
    # security_types: 'TLS', 'SSL', 'PLAIN'

    # Admin
    admin_pgp_alert_mail_title = JSON(validator=longlocal_v)
    admin_pgp_alert_mail_template = JSON(validator=longlocal_v)
    admin_anomaly_mail_template = JSON(validator=longlocal_v)
    admin_anomaly_mail_title = JSON(validator=longlocal_v)
    admin_anomaly_disk_low = JSON(validator=longlocal_v)
    admin_anomaly_disk_medium = JSON(validator=longlocal_v)
    admin_anomaly_disk_high = JSON(validator=longlocal_v)
    admin_anomaly_activities = JSON(validator=longlocal_v)

    # Receiver
    tip_mail_template = JSON(validator=longlocal_v)
    tip_mail_title = JSON(validator=longlocal_v)
    file_mail_template = JSON(validator=longlocal_v)
    file_mail_title = JSON(validator=longlocal_v)
    comment_mail_template = JSON(validator=longlocal_v)
    comment_mail_title = JSON(validator=longlocal_v)
    message_mail_template = JSON(validator=longlocal_v)
    message_mail_title = JSON(validator=longlocal_v)
    tip_expiration_mail_template = JSON(validator=longlocal_v)
    tip_expiration_mail_title = JSON(validator=longlocal_v)
    pgp_alert_mail_title = JSON(validator=longlocal_v)
    pgp_alert_mail_template = JSON(validator=longlocal_v)
    receiver_notification_limit_reached_mail_template = JSON(validator=longlocal_v)
    receiver_notification_limit_reached_mail_title = JSON(validator=longlocal_v)

    export_template = JSON(validator=longlocal_v)
    export_message_recipient = JSON(validator=longlocal_v)
    export_message_whistleblower = JSON(validator=longlocal_v)

    # Whistleblower Identity
    identity_access_authorized_mail_template = JSON(validator=longlocal_v)
    identity_access_authorized_mail_title = JSON(validator=longlocal_v)
    identity_access_denied_mail_template = JSON(validator=longlocal_v)
    identity_access_denied_mail_title = JSON(validator=longlocal_v)
    identity_access_request_mail_template = JSON(validator=longlocal_v)
    identity_access_request_mail_title = JSON(validator=longlocal_v)
    identity_provided_mail_template = JSON(validator=longlocal_v)
    identity_provided_mail_title = JSON(validator=longlocal_v)

    disable_admin_notification_emails = Bool(default=False)
    disable_custodian_notification_emails = Bool(default=False)
    disable_receiver_notification_emails = Bool(default=False)
    send_email_for_every_event = Bool(default=True)

    tip_expiration_threshold = Int(default=72)
    notification_threshold_per_hour = Int(default=20)
    notification_suspension_time = Int(default=(2 * 3600))

    exception_email_address = Unicode(validator=shorttext_v, default=u'*****@*****.**')
    exception_email_pgp_key_info = Unicode(default=u'')
    exception_email_pgp_key_fingerprint = Unicode(default=u'')
    exception_email_pgp_key_public = Unicode(default=u'')
    exception_email_pgp_key_expiration = DateTime(default_factory=datetime_null)
    exception_email_pgp_key_status = Unicode(default=u'disabled')

    unicode_keys = [
        'server',
        'username',
        'password',
        'source_name',
        'source_email',
        'security',
        'exception_email_address'
    ]

    localized_keys = [
        'admin_anomaly_mail_title',
        'admin_anomaly_mail_template',
        'admin_anomaly_disk_low',
        'admin_anomaly_disk_medium',
        'admin_anomaly_disk_high',
        'admin_anomaly_activities',
        'admin_pgp_alert_mail_title',
        'admin_pgp_alert_mail_template',
        'pgp_alert_mail_title',
        'pgp_alert_mail_template',
        'tip_mail_template',
        'tip_mail_title',
        'file_mail_template',
        'file_mail_title',
        'comment_mail_template',
        'comment_mail_title',
        'message_mail_template',
        'message_mail_title',
        'tip_expiration_mail_template',
        'tip_expiration_mail_title',
        'receiver_notification_limit_reached_mail_template',
        'receiver_notification_limit_reached_mail_title',
        'identity_access_authorized_mail_template',
        'identity_access_authorized_mail_title',
        'identity_access_denied_mail_template',
        'identity_access_denied_mail_title',
        'identity_access_request_mail_template',
        'identity_access_request_mail_title',
        'identity_provided_mail_template',
        'identity_provided_mail_title',
        'export_template',
        'export_message_whistleblower',
        'export_message_recipient'
    ]

    int_keys = [
        'port',
        'tip_expiration_threshold',
        'notification_threshold_per_hour',
        'notification_suspension_time',
    ]

    bool_keys = [
        'disable_admin_notification_emails',
        'disable_receiver_notification_emails',
        'send_email_for_every_event'
    ]
Example 17
class WhistleblowerTip_v_34(models.ModelWithID):
    __storm_table__ = 'whistleblowertip'
    internaltip_id = Unicode()
    receipt_hash = Unicode()
    access_counter = Int(default=0)
Example 18
class FieldAnswerGroup(Model):
    number = Int(default=0)
    fieldanswer_id = Unicode()

    unicode_keys = ['fieldanswer_id']
    int_keys = ['number']
Example n. 19
class BugTrackerComponentGroup(StormBase):
    """A collection of components in a remote bug tracker.

    Some bug trackers organize sets of components into higher level
    groups, such as Bugzilla's 'product'.
    """
    __storm_table__ = 'BugTrackerComponentGroup'

    id = Int(primary=True)
    name = Unicode(allow_none=False)
    bug_tracker_id = Int('bug_tracker')
    bug_tracker = Reference(bug_tracker_id, 'BugTracker.id')
    components = ReferenceSet(
        id,
        BugTrackerComponent.component_group_id,
        order_by=BugTrackerComponent.name)

    def addComponent(self, component_name):
        """Adds a component that is synced from a remote bug tracker"""

        component = BugTrackerComponent()
        component.name = component_name
        component.component_group = self

        store = IStore(BugTrackerComponent)
        store.add(component)
        store.flush()

        return component

    def getComponent(self, component_name):
        """Retrieves a component by the given name or id number.

        None is returned if there is no component by that name in the
        group.
        """

        if component_name is None:
            return None
        elif component_name.isdigit():
            component_id = int(component_name)
            return Store.of(self).find(
                BugTrackerComponent,
                BugTrackerComponent.id == component_id,
                BugTrackerComponent.component_group == self.id).one()
        else:
            return Store.of(self).find(
                BugTrackerComponent,
                BugTrackerComponent.name == component_name,
                BugTrackerComponent.component_group == self.id).one()

    def addCustomComponent(self, component_name):
        """Adds a component locally that isn't synced from a remote tracker
        """

        component = BugTrackerComponent()
        component.name = component_name
        component.component_group = self
        component.is_custom = True

        store = IStore(BugTrackerComponent)
        store.add(component)
        store.flush()

        return component
Example n. 20
class Stats(Model):
    start = DateTime()
    summary = JSON()
    free_disk_space = Int()
Example n. 21
class Node_v_32(models.ModelWithID):
    __storm_table__ = 'node'
    version = Unicode(default=unicode(__version__))
    version_db = Unicode(default=unicode(DATABASE_VERSION))
    name = Unicode(validator=shorttext_v, default=u'')
    basic_auth = Bool(default=False)
    basic_auth_username = Unicode(default=u'')
    basic_auth_password = Unicode(default=u'')
    public_site = Unicode(validator=shorttext_v, default=u'')
    hidden_service = Unicode(validator=shorttext_v, default=u'')
    receipt_salt = Unicode(validator=shorttext_v)
    languages_enabled = JSON(default=LANGUAGES_SUPPORTED_CODES)
    default_language = Unicode(validator=shorttext_v, default=u'en')
    default_timezone = Int(default=0)
    default_password = Unicode(validator=longtext_v, default=u'globaleaks')
    description = JSON(validator=longlocal_v, default_factory=dict)
    presentation = JSON(validator=longlocal_v, default_factory=dict)
    footer = JSON(validator=longlocal_v, default_factory=dict)
    security_awareness_title = JSON(validator=longlocal_v,
                                    default_factory=dict)
    security_awareness_text = JSON(validator=longlocal_v, default_factory=dict)
    maximum_namesize = Int(default=128)
    maximum_textsize = Int(default=4096)
    maximum_filesize = Int(default=30)
    tor2web_admin = Bool(default=True)
    tor2web_custodian = Bool(default=True)
    tor2web_whistleblower = Bool(default=False)
    tor2web_receiver = Bool(default=True)
    tor2web_unauth = Bool(default=True)
    allow_unencrypted = Bool(default=False)
    disable_encryption_warnings = Bool(default=False)
    allow_iframes_inclusion = Bool(default=False)
    submission_minimum_delay = Int(default=10)
    submission_maximum_ttl = Int(default=10800)
    can_postpone_expiration = Bool(default=False)
    can_delete_submission = Bool(default=False)
    can_grant_permissions = Bool(default=False)
    ahmia = Bool(default=False)
    allow_indexing = Bool(default=False)
    wizard_done = Bool(default=False)

    disable_submissions = Bool(default=False)
    disable_privacy_badge = Bool(default=False)
    disable_security_awareness_badge = Bool(default=False)
    disable_security_awareness_questions = Bool(default=False)
    disable_key_code_hint = Bool(default=False)
    disable_donation_panel = Bool(default=False)

    enable_captcha = Bool(default=True)
    enable_proof_of_work = Bool(default=True)

    enable_experimental_features = Bool(default=False)

    whistleblowing_question = JSON(validator=longlocal_v, default_factory=dict)
    whistleblowing_button = JSON(validator=longlocal_v, default_factory=dict)
    whistleblowing_receipt_prompt = JSON(validator=longlocal_v,
                                         default_factory=dict)

    simplified_login = Bool(default=True)

    enable_custom_privacy_badge = Bool(default=False)
    custom_privacy_badge_tor = JSON(validator=longlocal_v,
                                    default_factory=dict)
    custom_privacy_badge_none = JSON(validator=longlocal_v,
                                     default_factory=dict)

    header_title_homepage = JSON(validator=longlocal_v, default_factory=dict)
    header_title_submissionpage = JSON(validator=longlocal_v,
                                       default_factory=dict)
    header_title_receiptpage = JSON(validator=longlocal_v,
                                    default_factory=dict)
    header_title_tippage = JSON(validator=longlocal_v, default_factory=dict)

    widget_comments_title = JSON(validator=shortlocal_v, default_factory=dict)
    widget_messages_title = JSON(validator=shortlocal_v, default_factory=dict)
    widget_files_title = JSON(validator=shortlocal_v, default_factory=dict)

    landing_page = Unicode(default=u'homepage')

    contexts_clarification = JSON(validator=longlocal_v, default_factory=dict)
    show_small_context_cards = Bool(default=False)
    show_contexts_in_alphabetical_order = Bool(default=False)

    threshold_free_disk_megabytes_high = Int(default=200)
    threshold_free_disk_megabytes_medium = Int(default=500)
    threshold_free_disk_megabytes_low = Int(default=1000)

    threshold_free_disk_percentage_high = Int(default=3)
    threshold_free_disk_percentage_medium = Int(default=5)
    threshold_free_disk_percentage_low = Int(default=10)

    context_selector_type = Unicode(validator=shorttext_v, default=u'list')
Example n. 22
class Anomalies(Model):
    date = DateTime()
    alarm = Int()
    events = JSON()
Example n. 23
class Thread(Storm):
    """
    A thread of archived email, from a mailing-list. It is identified by both
    the list name and the thread id.
    """

    __storm_table__ = "thread"
    __storm_primary__ = "list_name", "thread_id"

    list_name = Unicode()
    thread_id = Unicode()
    date_active = DateTime()
    category_id = Int()
    emails = ReferenceSet((list_name, thread_id),
                          (Email.list_name, Email.thread_id),
                          order_by=Email.date)
    emails_by_reply = ReferenceSet((list_name, thread_id),
                                   (Email.list_name, Email.thread_id),
                                   order_by=Email.thread_order)
    category_obj = Reference(category_id, "Category.id")
    _starting_email = None

    def __init__(self, list_name, thread_id, date_active=None):
        self.list_name = unicode(list_name)
        self.thread_id = unicode(thread_id)
        self.date_active = date_active

    @property
    def _starting_email_req(self):
        """ Returns the request to get the starting email.
        If there are no results with in_reply_to IS NULL, then it's
        probably a partial import and we don't have the real first email.
        In this case, use the date.
        """
        return self.emails.order_by(Email.in_reply_to != None, Email.date)

    @property
    def starting_email(self):
        """Return (and cache) the email starting this thread"""
        if self._starting_email is None:
            self._starting_email = self._starting_email_req.first()
        return self._starting_email

    @property
    def last_email(self):
        return self.emails.order_by(Desc(Email.date)).first()

    @property
    def subject(self):
        """Return the subject of this thread"""
        if self._starting_email is not None:
            return self.starting_email.subject
        else:
            # Don't get the whole message if it's not cached yet (useful for
            # HyperKitty's thread view).
            return self._starting_email_req.values(Email.subject).next()

    @property
    def participants(self):
        """Set of email senders in this thread"""
        p = []
        for sender in self.emails.find().config(
                distinct=True).order_by().values(Email.sender_name,
                                                 Email.sender_email):
            p.append(sender)
        return p

    @property
    def email_ids(self):
        return list(self.emails.find().order_by().values(Email.message_id))

    @property
    def email_id_hashes(self):
        return list(self.emails.find().order_by().values(
            Email.message_id_hash))

    def __len__(self):
        return self.emails.count()

    def replies_after(self, date):
        return self.emails.find(Email.date > date)

    def _get_category(self):
        if not self.category_id:
            return None
        return self.category_obj.name

    def _set_category(self, name):
        if not name:
            self.category_id = None
            return
        # XXX: this is VERY hackish
        store = self.__storm_object_info__["store"]
        category = store.find(Category, Category.name == name).one()
        if category is None:
            category = Category(name)
            store.add(category)
            store.flush()
        self.category_id = category.id

    category = property(_get_category, _set_category)

    def __storm_pre_flush__(self):
        """Auto-set the active date from the last email in thread"""
        if self.date_active is not None:
            return
        email_dates = list(
            self.emails.order_by(Desc(Email.date)).config(limit=1).values(
                Email.date))
        if email_dates:
            self.date_active = email_dates[0]
        else:
            self.date_active = datetime.datetime.now()
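
The ordering used by _starting_email_req above relies on the fact that a
boolean sort key places False before True: emails whose in_reply_to is NULL
(the thread starters) compare as False and sort first, with the date as a
tie-breaker. A small self-contained illustration of that trick in plain
Python, with no Storm involved:

# Plain-Python illustration of the (in_reply_to != None, date) ordering used
# by Thread._starting_email_req: False sorts before True, so the message with
# no In-Reply-To header comes first, and the date breaks ties.
emails = [
    {'message_id': 'reply-2', 'in_reply_to': 'start', 'date': 3},
    {'message_id': 'start', 'in_reply_to': None, 'date': 1},
    {'message_id': 'reply-1', 'in_reply_to': 'start', 'date': 2},
]
emails.sort(key=lambda e: (e['in_reply_to'] is not None, e['date']))
assert emails[0]['message_id'] == 'start'
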
Example n. 24
class ApplicationData(Model):
    version = Int()
    default_questionnaire = JSON()

    int_keys = ['version']
    json_keys = ['default_questionnaire']
Example n. 25
class Node_v_30(ModelWithID):
    __storm_table__ = 'node'
    version = Unicode()
    version_db = Unicode()
    name = Unicode()
    public_site = Unicode()
    hidden_service = Unicode()
    receipt_salt = Unicode()
    languages_enabled = JSON()
    default_language = Unicode()
    default_timezone = Int()
    description = JSON()
    presentation = JSON()
    footer = JSON()
    security_awareness_title = JSON()
    security_awareness_text = JSON()
    context_selector_label = JSON()
    maximum_namesize = Int()
    maximum_textsize = Int()
    maximum_filesize = Int()
    tor2web_admin = Bool()
    tor2web_custodian = Bool()
    tor2web_whistleblower = Bool()
    tor2web_receiver = Bool()
    tor2web_unauth = Bool()
    allow_unencrypted = Bool()
    disable_encryption_warnings = Bool()
    allow_iframes_inclusion = Bool()
    submission_minimum_delay = Int()
    submission_maximum_ttl = Int()
    can_postpone_expiration = Bool()
    can_delete_submission = Bool()
    can_grant_permissions = Bool()
    ahmia = Bool()
    wizard_done = Bool()
    disable_submissions = Bool()
    disable_privacy_badge = Bool()
    disable_security_awareness_badge = Bool()
    disable_security_awareness_questions = Bool()
    disable_key_code_hint = Bool()
    disable_donation_panel = Bool()
    enable_captcha = Bool()
    enable_proof_of_work = Bool()
    enable_experimental_features = Bool()
    whistleblowing_question = JSON()
    whistleblowing_button = JSON()
    simplified_login = Bool()
    enable_custom_privacy_badge = Bool()
    custom_privacy_badge_tor = JSON()
    custom_privacy_badge_none = JSON()
    header_title_homepage = JSON()
    header_title_submissionpage = JSON()
    header_title_receiptpage = JSON()
    header_title_tippage = JSON()
    widget_comments_title = JSON()
    widget_messages_title = JSON()
    widget_files_title = JSON()
    landing_page = Unicode()
    show_contexts_in_alphabetical_order = Bool()
    threshold_free_disk_megabytes_high = Int()
    threshold_free_disk_megabytes_medium = Int()
    threshold_free_disk_megabytes_low = Int()
    threshold_free_disk_percentage_high = Int()
    threshold_free_disk_percentage_medium = Int()
    threshold_free_disk_percentage_low = Int()
Example n. 26
class ArchiveSubscriber(Storm):
    """See `IArchiveSubscriber`."""
    __storm_table__ = 'ArchiveSubscriber'

    id = Int(primary=True)

    archive_id = Int(name='archive', allow_none=False)
    archive = Reference(archive_id, 'Archive.id')

    registrant_id = Int(name='registrant', allow_none=False)
    registrant = Reference(registrant_id, 'Person.id')

    date_created = DateTime(name='date_created',
                            allow_none=False,
                            tzinfo=pytz.UTC)

    subscriber_id = Int(name='subscriber',
                        allow_none=False,
                        validator=validate_person)
    subscriber = Reference(subscriber_id, 'Person.id')

    date_expires = DateTime(name='date_expires',
                            allow_none=True,
                            tzinfo=pytz.UTC)

    status = DBEnum(name='status',
                    allow_none=False,
                    enum=ArchiveSubscriberStatus)

    description = Unicode(name='description', allow_none=True)

    date_cancelled = DateTime(name='date_cancelled',
                              allow_none=True,
                              tzinfo=pytz.UTC)

    cancelled_by_id = Int(name='cancelled_by', allow_none=True)
    cancelled_by = Reference(cancelled_by_id, 'Person.id')

    @property
    def displayname(self):
        """See `IArchiveSubscriber`."""
        return "%s's access to %s" % (self.subscriber.displayname,
                                      self.archive.displayname)

    def cancel(self, cancelled_by):
        """See `IArchiveSubscriber`."""
        # The bulk cancel normally has stricter permissions, but if we've
        # got this far then we know the caller has enough permissions to
        # cancel just this subscription.
        removeSecurityProxy(getUtility(IArchiveSubscriberSet)).cancel(
            [self.id], cancelled_by)

    def getNonActiveSubscribers(self):
        """See `IArchiveSubscriber`."""
        store = Store.of(self)
        if self.subscriber.is_team:

            # We get all the people who already have active tokens for
            # this archive (for example, through separate subscriptions).
            auth_token = LeftJoin(
                ArchiveAuthToken,
                And(ArchiveAuthToken.person_id == Person.id,
                    ArchiveAuthToken.archive_id == self.archive_id,
                    ArchiveAuthToken.date_deactivated == None))

            team_participation = Join(TeamParticipation,
                                      TeamParticipation.personID == Person.id)

            # Only return people with preferred email address set.
            preferred_email = Join(EmailAddress,
                                   EmailAddress.personID == Person.id)

            # We want to get all participants who are themselves
            # individuals, not teams:
            non_active_subscribers = store.using(
                Person, team_participation, preferred_email, auth_token).find(
                    (Person, EmailAddress),
                    EmailAddress.status == EmailAddressStatus.PREFERRED,
                    TeamParticipation.teamID == self.subscriber_id,
                    Person.teamowner == None,
                    # There is no existing archive auth token.
                    ArchiveAuthToken.person_id == None)
            non_active_subscribers.order_by(Person.name)
            return non_active_subscribers
        else:
            # Subscriber is not a team.
            token_set = getUtility(IArchiveAuthTokenSet)
            if token_set.getActiveTokenForArchiveAndPerson(
                    self.archive, self.subscriber) is not None:
                # There are active tokens, so return an empty result
                # set.
                return EmptyResultSet()

            # Otherwise return a result set containing only the
            # subscriber and their preferred email address.
            return store.find(
                (Person, EmailAddress), Person.id == self.subscriber_id,
                EmailAddress.personID == Person.id,
                EmailAddress.status == EmailAddressStatus.PREFERRED)
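
The team branch of getNonActiveSubscribers() above is an anti-join: it LEFT
JOINs ArchiveAuthToken and then keeps only the rows where the join found no
token (ArchiveAuthToken.person_id == None). A minimal, self-contained sqlite3
sketch of that pattern, using hypothetical person/token tables that stand in
for Person and ArchiveAuthToken:

# Anti-join sketch: LEFT JOIN the token table, then keep only the people for
# whom no token row matched. Tables and data are illustrative only.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.executescript("""
    CREATE TABLE person (id INTEGER PRIMARY KEY, name TEXT);
    CREATE TABLE token (id INTEGER PRIMARY KEY, person_id INTEGER);
    INSERT INTO person VALUES (1, 'has-token'), (2, 'needs-token');
    INSERT INTO token VALUES (1, 1);
""")
rows = conn.execute("""
    SELECT person.name
      FROM person
      LEFT JOIN token ON token.person_id = person.id
     WHERE token.id IS NULL
     ORDER BY person.name
""").fetchall()
assert rows == [('needs-token',)]
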
Example n. 27
class Node(Model):
    """
    This table has only one instance; it has an "id", but a second row of this
    table will never exist. It acts, more or less, like the configuration file
    of the previous GlobaLeaks release (and some of the GL 0.1 details are
    specified in Context).

    This table represents the system-wide settings.
    """
    __storm_table__ = 'node'

    name = Unicode(validator=shorttext_v)
    public_site = Unicode()
    hidden_service = Unicode()
    email = Unicode()
    receipt_salt = Unicode()
    last_update = DateTime()
    # this has a dedicated validator in update_node()
    receipt_regexp = Unicode()

    languages_enabled = Pickle()
    default_language = Unicode()

    # localized string
    description = Pickle(validator=longlocal_v)
    presentation = Pickle(validator=longlocal_v)
    footer = Pickle(validator=longlocal_v)
    subtitle = Pickle(validator=longlocal_v)

    # Time frame for the stats publicly exported by the node, expressed in hours.
    stats_update_time = Int()

    # Advanced settings
    maximum_namesize = Int()
    maximum_textsize = Int()
    maximum_filesize = Int()
    tor2web_admin = Bool()
    tor2web_submission = Bool()
    tor2web_receiver = Bool()
    tor2web_unauth = Bool()
    allow_unencrypted = Bool()

    # privileges configurable in node/context/receiver
    postpone_superpower = Bool()
    can_delete_submission = Bool()
    ahmia = Bool()
    wizard_done = Bool(default=False)
    anomaly_checks = Bool(default=False)

    exception_email = Unicode()

    unicode_keys = [
        'name', 'public_site', 'email', 'hidden_service', 'exception_email',
        'default_language', 'receipt_regexp'
    ]
    int_keys = [
        'stats_update_time', 'maximum_namesize', 'maximum_textsize',
        'maximum_filesize'
    ]
    bool_keys = [
        'tor2web_admin', 'tor2web_receiver', 'tor2web_submission',
        'tor2web_unauth', 'postpone_superpower', 'anomaly_checks',
        'can_delete_submission', 'ahmia', 'allow_unencrypted'
    ]
    # wizard_done is not checked because it's set by the backend
    localized_strings = ['description', 'presentation', 'footer', 'subtitle']
Example n. 28
class Address(object):
    __storm_table__ = 'address'
    id = Int(primary=True)
    address = Unicode()
    person_id = Int()
    person = Reference(person_id, Person.id)
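
The Address/Person pair here follows the pattern from the Storm tutorial. A
minimal, self-contained sketch of how such models are exercised against an
in-memory SQLite store is shown below; the Person class and both CREATE TABLE
statements are assumptions mirroring the tutorial, and Address is repeated so
the snippet runs on its own:

# Storm tutorial-style sketch: in-memory SQLite store, one Person row and one
# Address row linked through a Reference. Names and data are illustrative.
from storm.locals import create_database, Store, Int, Unicode, Reference


class Person(object):
    __storm_table__ = 'person'
    id = Int(primary=True)
    name = Unicode()


class Address(object):
    __storm_table__ = 'address'
    id = Int(primary=True)
    address = Unicode()
    person_id = Int()
    person = Reference(person_id, Person.id)


database = create_database('sqlite:')  # 'sqlite:' means an in-memory database
store = Store(database)
store.execute('CREATE TABLE person (id INTEGER PRIMARY KEY, name VARCHAR)')
store.execute('CREATE TABLE address '
              '(id INTEGER PRIMARY KEY, address VARCHAR, person_id INTEGER)')

person = Person()
person.name = u'Eric Jones'
store.add(person)
store.flush()  # assigns person.id

address = Address()
address.address = u'123 Example Street'
address.person = person  # fills person_id through the Reference
store.add(address)
store.flush()

assert address.person_id == person.id
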
Example n. 29
class Notification_v_33(ModelWithID):
    __storm_table__ = 'notification'

    server = Unicode(validator=shorttext_v, default=u'demo.globaleaks.org')
    port = Int(default=9267)
    username = Unicode(validator=shorttext_v, default=u'hey_you_should_change_me')
    password = Unicode(validator=shorttext_v, default=u'yes_you_really_should_change_me')
    source_name = Unicode(validator=shorttext_v, default=u'GlobaLeaks - CHANGE EMAIL ACCOUNT USED FOR NOTIFICATION')
    source_email = Unicode(validator=shorttext_v, default=u'*****@*****.**')
    security = Unicode(validator=shorttext_v, default=u'TLS')
    admin_pgp_alert_mail_title = JSON(validator=longlocal_v)
    admin_pgp_alert_mail_template = JSON(validator=longlocal_v)
    admin_anomaly_mail_template = JSON(validator=longlocal_v)
    admin_anomaly_mail_title = JSON(validator=longlocal_v)
    admin_anomaly_disk_low = JSON(validator=longlocal_v)
    admin_anomaly_disk_medium = JSON(validator=longlocal_v)
    admin_anomaly_disk_high = JSON(validator=longlocal_v)
    admin_anomaly_activities = JSON(validator=longlocal_v)
    admin_test_static_mail_template = JSON(validator=longlocal_v)
    admin_test_static_mail_title = JSON(validator=longlocal_v)
    tip_mail_template = JSON(validator=longlocal_v)
    tip_mail_title = JSON(validator=longlocal_v)
    file_mail_template = JSON(validator=longlocal_v)
    file_mail_title = JSON(validator=longlocal_v)
    comment_mail_template = JSON(validator=longlocal_v)
    comment_mail_title = JSON(validator=longlocal_v)
    message_mail_template = JSON(validator=longlocal_v)
    message_mail_title = JSON(validator=longlocal_v)
    tip_expiration_mail_template = JSON(validator=longlocal_v)
    tip_expiration_mail_title = JSON(validator=longlocal_v)
    pgp_alert_mail_title = JSON(validator=longlocal_v)
    pgp_alert_mail_template = JSON(validator=longlocal_v)
    receiver_notification_limit_reached_mail_template = JSON(validator=longlocal_v)
    receiver_notification_limit_reached_mail_title = JSON(validator=longlocal_v)
    export_template = JSON(validator=longlocal_v)
    export_message_recipient = JSON(validator=longlocal_v)
    export_message_whistleblower = JSON(validator=longlocal_v)
    identity_access_authorized_mail_template = JSON(validator=longlocal_v)
    identity_access_authorized_mail_title = JSON(validator=longlocal_v)
    identity_access_denied_mail_template = JSON(validator=longlocal_v)
    identity_access_denied_mail_title = JSON(validator=longlocal_v)
    identity_access_request_mail_template = JSON(validator=longlocal_v)
    identity_access_request_mail_title = JSON(validator=longlocal_v)
    identity_provided_mail_template = JSON(validator=longlocal_v)
    identity_provided_mail_title = JSON(validator=longlocal_v)
    disable_admin_notification_emails = Bool(default=False)
    disable_custodian_notification_emails = Bool(default=False)
    disable_receiver_notification_emails = Bool(default=False)
    send_email_for_every_event = Bool(default=True)
    tip_expiration_threshold = Int(validator=natnum_v, default=72)
    notification_threshold_per_hour = Int(validator=natnum_v, default=20)
    notification_suspension_time = Int(validator=natnum_v, default=(2 * 3600))
    exception_email_address = Unicode(validator=shorttext_v, default=u'*****@*****.**')
    exception_email_pgp_key_fingerprint = Unicode(default=u'')
    exception_email_pgp_key_public = Unicode(default=u'')
    exception_email_pgp_key_expiration = DateTime(default_factory=datetime_null)

    localized_keys = [
        'admin_anomaly_mail_title',
        'admin_anomaly_mail_template',
        'admin_anomaly_disk_low',
        'admin_anomaly_disk_medium',
        'admin_anomaly_disk_high',
        'admin_anomaly_activities',
        'admin_pgp_alert_mail_title',
        'admin_pgp_alert_mail_template',
        'admin_test_static_mail_template',
        'admin_test_static_mail_title',
        'pgp_alert_mail_title',
        'pgp_alert_mail_template',
        'tip_mail_template',
        'tip_mail_title',
        'file_mail_template',
        'file_mail_title',
        'comment_mail_template',
        'comment_mail_title',
        'message_mail_template',
        'message_mail_title',
        'tip_expiration_mail_template',
        'tip_expiration_mail_title',
        'receiver_notification_limit_reached_mail_template',
        'receiver_notification_limit_reached_mail_title',
        'identity_access_authorized_mail_template',
        'identity_access_authorized_mail_title',
        'identity_access_denied_mail_template',
        'identity_access_denied_mail_title',
        'identity_access_request_mail_template',
        'identity_access_request_mail_title',
        'identity_provided_mail_template',
        'identity_provided_mail_title',
        'export_template',
        'export_message_whistleblower',
        'export_message_recipient'
    ]
Example n. 30
class DBAlluraIssueExt(object):
    # FIXME: Do we really need all these comments? DRY!!!
    """
    Maps elements from X{issues_ext_allura} table.

    @param labels: issue labels
    @type labels: C{str}
    @param private: issue private or not
    @type private: C{boolean}
    @param ticket_num: identifier of the issue
    @type ticket_num: C{int}
    @param discussion_thread_url: issue url for discussion thread
    @type discussion_thread_url: L{storm.locals.Unicode}
    @param related_artifacts: issue related artifacts
    @type related_artifacts: L{storm.locals.Unicode}
    @param custom_fields: issue custom fields
    @type custom_fields: L{storm.locals.Unicode}
    @param mod_date: issue modification date
    @type mod_date: L{storm.locals.Date}

    @param issue_id: identifier of the issue
    @type issue_id: C{int}


    @ivar __storm_table__: Name of the database table.
    @type __storm_table__: C{str}

    @ivar id: Extra issue fields identifier.
    @type id: L{storm.locals.Int}
    @ivar labels: issue labels
    @type labels: L{storm.locals.Unicode}
    @ivar private: issue private or not
    @type private: L{storm.locals.Boolean}
    @ivar ticket_num: Issue identifier.
    @type ticket_num: L{storm.locals.Int}
    @ivar discussion_thread_url: issue url for discussion thread
    @type discussion_thread_url: L{storm.locals.Unicode}
    @ivar related_artifacts: issue related artifacts
    @type related_artifacts: L{storm.locals.Unicode}
    @ivar custom_fields: issue custom fields
    @type custom_fields: L{storm.locals.Unicode}
    @ivar mod_date: issue modification date
    @type mod_date: L{storm.locals.Date}
    @ivar issue_id: Issue identifier.
    @type issue_id: L{storm.locals.Int}
    @ivar issue: Reference to L{DBIssue} object.
    @type issue: L{storm.locals.Reference}
    """
        
    __storm_table__ = 'issues_ext_allura'

    id = Int(primary=True)
    labels = Unicode()
    private = Bool()
    ticket_num = Int()
    discussion_thread_url = Unicode()
    related_artifacts = Unicode()
    custom_fields = Unicode()
    mod_date = DateTime()
    issue_id = Int()

    issue = Reference(issue_id, DBIssue.id)

    def __init__(self, issue_id):
        self.issue_id = issue_id