Code example #1
File: __init__.py  Project: ktdreyer/pulp
class RepositoryContentUnit(AutoRetryDocument):
    """
    Represents the link between a repository and the units associated with it.

    This inherits from AutoRetryDocument (a mongoengine Document subclass) and defines
    the schema for the documents in the repo_content_units collection.

    :ivar repo_id: string representation of the repository id
    :type repo_id: mongoengine.StringField
    :ivar unit_id: string representation of content unit id
    :type unit_id: mongoengine.StringField
    :ivar unit_type_id: string representation of content unit type
    :type unit_type_id: mongoengine.StringField
    :ivar created: ISO8601 representation of the time the association was created
    :type created: pulp.server.db.fields.ISO8601StringField
    :ivar updated: ISO8601 representation of last time a copy, sync, or upload ensured that
                   the association existed
    :type updated: pulp.server.db.fields.ISO8601StringField
    :ivar _ns: The namespace field (Deprecated)
    :type _ns: mongoengine.StringField
    """

    repo_id = StringField(required=True)
    unit_id = StringField(required=True)
    unit_type_id = StringField(required=True)

    created = ISO8601StringField(
        required=True,
        default=lambda: dateutils.format_iso8601_utc_timestamp(
            dateutils.now_utc_timestamp()))
    updated = ISO8601StringField(
        required=True,
        default=lambda: dateutils.format_iso8601_utc_timestamp(
            dateutils.now_utc_timestamp()))

    # For backward compatibility
    _ns = StringField(default='repo_content_units')

    meta = {'collection': 'repo_content_units',
            'allow_inheritance': False,
            'indexes': [
                {'fields': ['repo_id', 'unit_type_id', 'unit_id'],
                 'unique': True},
                # Used for reverse lookup of units to repositories
                {'fields': ['unit_id']}
            ]}
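
A minimal usage sketch (not from the Pulp source), assuming RepositoryContentUnit is importable from pulp.server.db.model and a mongoengine connection is already open. The unique compound index above means saving the same (repo_id, unit_type_id, unit_id) triple twice raises NotUniqueError, while the single-field unit_id index serves reverse lookups; the repository and unit ids used here are illustrative only:

from mongoengine import NotUniqueError
from pulp.server.db.model import RepositoryContentUnit  # assumed module path

# Associate a content unit with a repository; created/updated default to the
# current UTC time in ISO8601 form via the lambdas shown above.
link = RepositoryContentUnit(repo_id='zoo', unit_id='abc123', unit_type_id='rpm')
try:
    link.save()
except NotUniqueError:
    pass  # this association already exists

# Reverse lookup: which repositories contain this unit?
repo_ids = [assoc.repo_id for assoc in RepositoryContentUnit.objects(unit_id='abc123')]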
Code example #2
class Importer(AutoRetryDocument):
    """
    Defines the schema for an Importer in the `repo_importers` collection.
    """
    repo_id = StringField(required=True)
    importer_type_id = StringField(required=True)
    config = DictField()
    scratchpad = DictField(default=None)
    last_sync = ISO8601StringField()

    # For backward compatibility
    _ns = StringField(default='repo_importers')
    serializer = serializers.ImporterSerializer

    meta = {
        'collection': 'repo_importers',
        'allow_inheritance': False,
        'indexes': [{
            'fields': ['-repo_id', '-importer_type_id'],
            'unique': True
        }],
        'queryset_class': CriteriaQuerySet
    }

    @classmethod
    def pre_delete(cls, sender, document, **kwargs):
        """
        Purge the lazy catalog of all entries for the importer being deleted.

        :param sender:   class of sender (unused)
        :type  sender:   object
        :param document: mongoengine document being deleted.
        :type  document: pulp.server.db.model.Importer
        """
        query_set = LazyCatalogEntry.objects(importer_id=str(document.id))
        _logger.debug(
            _('Deleting lazy catalog entries for the {repo} repository.').format(
                repo=document.repo_id))
        query_set.delete()
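
pre_delete above is written as a mongoengine signal handler. A hedged sketch of how such a handler is typically registered (the connect call is standard mongoengine; exactly where Pulp performs this wiring is an assumption):

from mongoengine import signals

# Purge LazyCatalogEntry rows whenever an Importer document is deleted
# through mongoengine's delete path.
signals.pre_delete.connect(Importer.pre_delete, sender=Importer)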
Code example #3
class TaskStatus(AutoRetryDocument, ReaperMixin):
    """
    Represents a task.

    Defines the schema for the documents in the task_status collection. The documents in
    this collection may be reaped, so it inherits from ReaperMixin.

    :ivar task_id:     identity of the task this status corresponds to
    :type task_id:     basestring
    :ivar worker_name: The name of the worker that the Task is in
    :type worker_name: basestring
    :ivar tags:        custom tags on the task
    :type tags:        list
    :ivar state:       state of callable in its lifecycle
    :type state:       basestring
    :ivar error: Any errors or collections of errors that occurred while this task was running
    :type error: dict (created from a PulpException)
    :ivar spawned_tasks: List of tasks that were spawned during the running of this task
    :type spawned_tasks: list of str
    :ivar progress_report: A report containing information about task's progress
    :type progress_report: dict
    :ivar task_type:   the fully qualified (package/method) type of the task
    :type task_type:   basestring
    :ivar start_time:  ISO8601 representation of the time the task started executing
    :type start_time:  basestring
    :ivar finish_time: ISO8601 representation of the time the task completed
    :type finish_time: basestring
    :ivar group_id:    The id used to identify which group of tasks a task belongs to
    :type group_id:    uuid.UUID
    :ivar result:      return value of the callable, if any
    :type result:      any
    :ivar exception:   Deprecated. This is always None.
    :type exception:   None
    :ivar traceback:   Deprecated. This is always None.
    :type traceback:   None
    """

    task_id = StringField(required=True)
    worker_name = StringField()
    tags = ListField(StringField())
    state = StringField(choices=constants.CALL_STATES, default=constants.CALL_WAITING_STATE)
    error = DictField(default=None)
    spawned_tasks = ListField(StringField())
    progress_report = DictField()
    task_type = StringField()
    start_time = ISO8601StringField()
    finish_time = ISO8601StringField()
    result = DynamicField()
    group_id = UUIDField(default=None)

    # These are deprecated, and will always be None
    exception = StringField()
    traceback = StringField()

    # For backward compatibility
    _ns = StringField(default='task_status')

    meta = {'collection': 'task_status',
            'indexes': ['-tags', '-state', {'fields': ['-task_id'], 'unique': True}, '-group_id'],
            'allow_inheritance': False,
            'queryset_class': CriteriaQuerySet}

    def save_with_set_on_insert(self, fields_to_set_on_insert):
        """
        Save the current state of the TaskStatus to the database, using an upsert operation.
        The upsert operation will only set those fields if this becomes an insert operation,
        otherwise those fields will be ignored. This also validates the fields according to the
        schema above.

        This is required because the current mongoengine version we are using does not support
        upsert with set_on_insert through mongoengine queries. Once we update to a version
        which supports this, this method can be deleted and its usages can be replaced
        with mongoengine upsert queries.

        :param fields_to_set_on_insert: A list of field names that should be updated with Mongo's
                                        $setOnInsert operator.
        :type  fields_to_set_on_insert: list
        """

        # If fields_to_set_on_insert is None or empty, just save
        if not fields_to_set_on_insert:
            self.save()
            return

        # This will be used in place of superclass' save method, so we need to call validate()
        # explicitly.
        self.validate()

        stuff_to_update = dict(copy.deepcopy(self._data))

        # Let's pop the $setOnInsert attributes out of the copy of self so that we can pass the
        # remaining attributes to the $set operator in the query below.
        set_on_insert = {}
        for field in fields_to_set_on_insert:
            set_on_insert[field] = stuff_to_update.pop(field)
        task_id = stuff_to_update.pop('task_id')

        update = {'$set': stuff_to_update,
                  '$setOnInsert': set_on_insert}
        TaskStatus._get_collection().update({'task_id': task_id}, update, upsert=True)

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        """
        Send a taskstatus message on save.

        :param sender: class of sender (unused)
        :type  sender: class
        :param document: mongoengine document
        :type  document: mongoengine.Document

        """
        send_taskstatus_message(document, routing_key="tasks.%s" % document['task_id'])
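
A short usage sketch of save_with_set_on_insert, reusing the dateutils and constants names from this module (CALL_RUNNING_STATE is assumed to be one of constants.CALL_STATES; the task id is illustrative). The listed fields go through $setOnInsert, so an existing document for the same task_id keeps its original start_time while everything else is written via $set:

now = dateutils.format_iso8601_utc_timestamp(dateutils.now_utc_timestamp())
status = TaskStatus(task_id='1234-abcd', state=constants.CALL_RUNNING_STATE,
                    start_time=now)
# Upsert: if a document for this task_id already exists, its start_time is
# left untouched; otherwise the inserted document gets start_time=now.
status.save_with_set_on_insert(fields_to_set_on_insert=['start_time'])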
Code example #4
class Importer(AutoRetryDocument):
    """
    Defines the schema for an Importer in the `repo_importers` collection.
    """
    repo_id = StringField(required=True)
    importer_type_id = StringField(required=True)
    config = DictField()
    scratchpad = DictField(default=None)
    last_sync = ISO8601StringField()
    last_updated = UTCDateTimeField()
    last_override_config = DictField()

    # For backward compatibility
    _ns = StringField(default='repo_importers')
    SERIALIZER = serializers.ImporterSerializer

    meta = {'collection': 'repo_importers',
            'allow_inheritance': False,
            'indexes': [{'fields': ['-repo_id', '-importer_type_id'], 'unique': True}],
            'queryset_class': CriteriaQuerySet}

    @classmethod
    def pre_delete(cls, sender, document, **kwargs):
        """
        Purge the lazy catalog of all entries for the importer being deleted.

        :param sender:   class of sender (unused)
        :type  sender:   object
        :param document: mongoengine document being deleted.
        :type  document: pulp.server.db.model.Importer
        """
        query_set = LazyCatalogEntry.objects(importer_id=str(document.id))
        _logger.debug(_('Deleting lazy catalog entries for the {repo} repository.').format(
            repo=document.repo_id))
        query_set.delete()

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        """
        The signal handler that is triggered before an Importer is saved.

        :param sender:   class of sender (unused)
        :type sender:    object
        :param document: mongoengine document being saved
        :type document:  pulp.server.db.model.Importer
        """
        document.last_updated = dateutils.now_utc_datetime_with_tzinfo()

    def delete(self):
        """
        Delete the Importer. Remove any documents it has stored.
        """
        if os.path.exists(self._local_storage_path):
            shutil.rmtree(self._local_storage_path)
        super(Importer, self).delete()

    def save(self):
        """
        Save the Importer. Additionally, write any pki documents from its config into disk storage
        for use by requests.
        """
        super(Importer, self).save()
        # A map of Importer config key names to file paths for the TLS PEM settings.
        pem_keys_paths = (
            (importer_constants.KEY_SSL_CA_CERT, self.tls_ca_cert_path),
            (importer_constants.KEY_SSL_CLIENT_CERT, self.tls_client_cert_path),
            (importer_constants.KEY_SSL_CLIENT_KEY, self.tls_client_key_path))
        for key, path in pem_keys_paths:
            self._write_pem_file(key, path)

    @property
    def tls_ca_cert_path(self):
        """
        Return the path where the TLS CA certificate should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'ca.crt')

    @property
    def tls_client_cert_path(self):
        """
        Return the path where the TLS client certificate should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'client.crt')

    @property
    def tls_client_key_path(self):
        """
        Return the path where the TLS client key should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'client.key')

    @property
    def _local_storage_path(self):
        """
        Return the path that the Importer should use for local storage.

        :rtype: basestring
        """
        return os.path.join(
            LOCAL_STORAGE, 'importers',
            '{repo}-{importer_type}'.format(repo=self.repo_id, importer_type=self.importer_type_id))

    @property
    def _pki_path(self):
        """
        Return the path that all pki files should be stored within for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._local_storage_path, 'pki')

    def _write_pem_file(self, config_key, path):
        """
        Write the PEM data from self.config[config_key] to the given path, if the key is defined and
        is "truthy".

        :param config_key: The key corresponding to a value in self.config to write to path.
        :type  config_key: basestring
        :param path:       The path to write the PEM data to.
        :type  path:       basestring
        """
        if config_key in self.config and self.config[config_key]:
            if not os.path.exists(self._pki_path):
                misc.mkdir(os.path.dirname(self._pki_path))
                os.mkdir(self._pki_path, 0o700)
            with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, 0o600), 'w') as pem_file:
                pem_file.write(self.config[config_key].encode('utf-8'))
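
A hedged sketch of the save() side effect described in its docstring, assuming the module-level names used above (Importer, importer_constants, os), a live mongoengine connection, and a writable LOCAL_STORAGE. The repo and importer type ids are illustrative and the PEM text is a placeholder:

ca_pem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n'  # placeholder PEM
importer = Importer(repo_id='zoo', importer_type_id='yum_importer',
                    config={importer_constants.KEY_SSL_CA_CERT: ca_pem})
importer.save()
# The CA certificate from the config is now also stored on disk for use by requests:
assert os.path.exists(importer.tls_ca_cert_path)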