Example #1
0
class Distributor(AutoRetryDocument):
    """
    Document schema for a Distributor stored in the 'repo_distributors' collection.
    """
    repo_id = StringField(required=True)
    distributor_id = StringField(required=True, regex=r'^[\-_A-Za-z0-9]+$')
    distributor_type_id = StringField(required=True)
    config = DictField()
    auto_publish = BooleanField(default=False)
    last_publish = UTCDateTimeField()
    last_updated = UTCDateTimeField()
    last_override_config = DictField()
    scratchpad = DictField()

    # Retained for backward compatibility with pre-mongoengine documents.
    _ns = StringField(default='repo_distributors')
    SERIALIZER = serializers.Distributor

    meta = {'collection': 'repo_distributors',
            'allow_inheritance': False,
            'indexes': [{'fields': ['-repo_id', '-distributor_id'], 'unique': True}],
            'queryset_class': CriteriaQuerySet}

    @property
    def resource_tag(self):
        """
        A globally unique identifier for this repo/distributor pair, usable in
        cross-type comparisons.

        :return: identifier of the form pulp:distributor:<repo_id>:<distributor_id>
        :rtype:  basestring
        """
        tag_parts = (self.repo_id, self.distributor_id)
        return 'pulp:distributor:{0}:{1}'.format(*tag_parts)

    @classmethod
    def pre_save_signal(cls, sender, document, **kwargs):
        """
        Signal handler run just before a Distributor document is saved; stamps
        the document with the current UTC time.

        :param sender: sender class
        :type sender: object
        :param document: Document that sent the signal
        :type document: Distributor
        """
        document.last_updated = dateutils.now_utc_datetime_with_tzinfo()
Example #2
0
def main():
    """
    This is the high level entry method. It does logging if any Exceptions are raised.
    """
    if os.getuid() == 0:
        print >> sys.stderr, _(
            'This must not be run as root, but as the same user apache runs as.'
        )
        return os.EX_USAGE
    try:
        options = parse_args()
        _start_logging()
        connection.initialize(max_timeout=1)

        # Prompt the user if there are workers that have not timed out
        if filter(
                lambda worker: (UTCDateTimeField().to_python(datetime.now()) -
                                worker['last_heartbeat']) < timedelta(
                                    seconds=constants.CELERY_TIMEOUT_SECONDS),
                status.get_workers()):
            if not _user_input_continue(
                    'There are still running workers, continuing could '
                    'corrupt your Pulp installation. Are you sure you wish '
                    'to continue?'):
                return os.EX_OK
        return _auto_manage_db(options)
    except UnperformedMigrationException:
        return 1
    except DataError, e:
        _logger.critical(str(e))
        _logger.critical(''.join(traceback.format_exception(*sys.exc_info())))
        return os.EX_DATAERR
Example #3
0
class Worker(AutoRetryDocument):
    """
    Represents a single worker process.

    Defines the schema for documents in the 'workers' collection.

    :ivar name:            worker name, in the form of "worker_type@hostname"
    :type name:            mongoengine.StringField
    :ivar last_heartbeat:  timestamp of the last heartbeat received from the Worker
    :type last_heartbeat:  UTCDateTimeField
    """
    name = StringField(primary_key=True)
    last_heartbeat = UTCDateTimeField()

    # Retained for backward compatibility.
    _ns = StringField(default='workers')

    meta = {
        'collection': 'workers',
        # small collection; no additional indexes are needed
        'indexes': [],
        'allow_inheritance': False,
        'queryset_class': WorkerQuerySet,
    }

    @property
    def queue_name(self):
        """
        Convenience accessor for the queue name that Celery assigns to this
        Worker.

        :return: The name of the queue that this Worker is uniquely subscribed to.
        :rtype:  basestring
        """
        return '{0}.dq'.format(self.name)
Example #4
0
class Distributor(AutoRetryDocument):
    """
    Schema for Distributor documents kept in the 'repo_distributors' collection.
    """
    repo_id = StringField(required=True)
    distributor_id = StringField(required=True, regex=r'^[\-_A-Za-z0-9]+$')
    distributor_type_id = StringField(required=True)
    config = DictField()
    auto_publish = BooleanField(default=False)
    last_publish = UTCDateTimeField()
    scratchpad = DictField()

    # Retained for backward compatibility.
    _ns = StringField(default='repo_distributors')
    SERIALIZER = serializers.Distributor

    meta = {
        'collection': 'repo_distributors',
        'allow_inheritance': False,
        'indexes': [
            {'fields': ['-repo_id', '-distributor_id'], 'unique': True},
        ],
        'queryset_class': CriteriaQuerySet,
    }

    @property
    def resource_tag(self):
        """
        Globally unique identifier for this repo/distributor pair that can be
        used in cross-type comparisons.

        :return: identifier of the form pulp:distributor:<repo_id>:<distributor_id>
        :rtype:  basestring
        """
        return 'pulp:distributor:{0}:{1}'.format(
            self.repo_id, self.distributor_id)
Example #5
0
 def test_user_not_prompted_if_workers_timeout(self, mock__auto_manage_db, mock__user_input,
                                               mock_get_workers, *unused_mocks):
     """
     main() must not prompt the user when every worker's last heartbeat is old
     enough to have timed out.
     """
     # A heartbeat from the year 2000 is far outside any timeout window.
     mock_get_workers.return_value = [{'last_heartbeat':
                                      UTCDateTimeField().to_python(datetime(2000, 1, 1))}]
     manage.main()
     # make sure user is not asked for input when workers have timed out
     self.assertFalse(mock__user_input.called)
Example #6
0
def main():
    """
    This is the high level entry method. It does logging if any Exceptions are raised.

    :return: an os.EX_* exit code, or 1 if a migration was not performed
    """
    # Refuse to run as root; the process must run as the apache user.
    if os.getuid() == 0:
        print >> sys.stderr, _(
            'This must not be run as root, but as the same user apache runs as.'
        )
        return os.EX_USAGE
    try:
        options = parse_args()
        _start_logging()
        connection.initialize(max_timeout=1)
        active_workers = None

        # A dry run makes no changes, so worker detection is skipped for it.
        if not options.dry_run:
            active_workers = status.get_workers()

        if active_workers:
            # Most recent heartbeat seen across all known workers.
            last_worker_time = max(
                [worker['last_heartbeat'] for worker in active_workers])
            time_from_last = UTCDateTimeField().to_python(
                datetime.utcnow()) - last_worker_time
            # How much longer we must wait before the workers can be
            # considered gone.
            wait_time = timedelta(
                seconds=constants.MIGRATION_WAIT_TIME) - time_from_last

            if wait_time > timedelta(0):
                print _('\nThe following processes might still be running:')
                for worker in active_workers:
                    print _('\t%s' % worker['name'])

                # Count down the remaining wait, one line update per second.
                for i in range(wait_time.seconds, 0, -1):
                    print _(
                        '\rPlease wait %s seconds while Pulp confirms this.' %
                        i),
                    sys.stdout.flush()
                    time.sleep(1)

                # Any worker that heartbeated during the wait is still alive.
                still_active_workers = [
                    worker for worker in status.get_workers()
                    if worker['last_heartbeat'] > last_worker_time
                ]

                if still_active_workers:
                    print >> sys.stderr, _(
                        '\n\nThe following processes are still running, please'
                        ' stop the running workers before retrying the'
                        ' pulp-manage-db command.')
                    for worker in still_active_workers:
                        print _('\t%s' % worker['name'])

                    return os.EX_SOFTWARE
        return _auto_manage_db(options)
    except UnperformedMigrationException:
        return 1
    except DataError, e:
        _logger.critical(str(e))
        _logger.critical(''.join(traceback.format_exception(*sys.exc_info())))
        return os.EX_DATAERR
Example #7
0
 def test_user_prompted_if_workers_tasks_exist(self, mock__auto_manage_db,
                                               mock__user_input, mock_get_workers,
                                               *unused_mocks):
     """
     main() must prompt the user when a worker heartbeat is recent, and exit
     cleanly when the user declines to continue.
     """
     # A heartbeat of "now" is within the timeout window, so the worker is
     # treated as still running.
     mock_get_workers.return_value = [{'last_heartbeat':
                                      UTCDateTimeField().to_python(datetime.now())}]
     ret = manage.main()
     # make sure system exits ok if user chooses not to proceed
     self.assertEqual(ret, os.EX_OK)
     # make sure user is asked for input when there are workers running
     self.assertTrue(mock__user_input.called)
Example #8
0
class CeleryBeatLock(AutoRetryDocument):
    """
    Single-document collection describing the currently held celerybeat lock.

    :ivar name: name of the celerybeat instance that holds the lock
    :type name: basestring
    :ivar timestamp: UTC time at which the lock was acquired
    :type timestamp: datetime.datetime
    :ivar lock: unique key that is set to "locked" while the lock is held
    :type lock: basestring
    :ivar _ns: (Deprecated) name of the collection this model represents
    :type _ns: mongoengine.StringField
    """
    name = StringField(required=True)
    timestamp = UTCDateTimeField(required=True)
    # The unique constraint guarantees at most one holder at a time.
    lock = StringField(required=True, default="locked", unique=True)

    # For backward compatibility
    _ns = StringField(default='celery_beat_lock')
Example #9
0
class Repository(AutoRetryDocument):
    """
    Defines schema for a pulp repository in the `repos` collection.

    :ivar repo_id: unique across all repos
    :type repo_id: mongoengine.StringField
    :ivar display_name: user-readable name of the repository
    :type display_name: mongoengine.StringField
    :ivar description: free form text provided by the user to describe the repo
    :type description: mongoengine.StringField
    :ivar notes: arbitrary key-value pairs programmatically describing the repo;
                 these are intended as a way to describe the repo usage or
                 organizational purposes and should not vary depending on the
                 actual content of the repo
    :type notes: mongoengine.DictField
    :ivar content_unit_counts: key-value pairs of number of units associated with this repo.
                               This is different than the number of associations, since a
                               unit may be associated multiple times.
    :type content_unit_counts: mongoengine.DictField
    :ivar scratchpad: Field used to persistently store arbitrary information from the plugins
                      across multiple operations.
    :type scratchpad: mongoengine.DictField
    :ivar last_unit_added: Datetime of the most recent occurence of adding a unit to the repo
    :type last_unit_added: UTCDateTimeField
    :ivar last_unit_removed: Datetime of the most recent occurence of removing a unit from the repo
    :type last_unit_removed: UTCDateTimeField
    :ivar _ns: (Deprecated) Namespace of repo, included for backwards compatibility.
    :type _ns: mongoengine.StringField
    """

    # Previously, this field was 'id'. This field is required to be unique, but the previous index
    # was '-id'. Setting unique=True here would generate a new 'repo_id' index. Instead, we set the
    # index in meta and enforce uniqueness there.
    repo_id = StringField(required=True, regex=r'^[.\-_A-Za-z0-9]+$')
    display_name = StringField()
    description = StringField()
    notes = DictField()
    # Use the dict constructor as a callable default so each document gets its
    # own fresh dict instead of all documents sharing one mutable default.
    scratchpad = DictField(default=dict)
    content_unit_counts = DictField(default=dict)
    last_unit_added = UTCDateTimeField()
    last_unit_removed = UTCDateTimeField()

    # For backward compatibility
    _ns = StringField(default='repos')

    meta = {'collection': 'repos',
            'allow_inheritance': False,
            'indexes': [{'fields': ['-repo_id'], 'unique': True}],
            'queryset_class': RepoQuerySet}
    SERIALIZER = serializers.Repository

    def to_transfer_repo(self):
        """
        Converts the given database representation of a repository into a plugin repository transfer
        object, including any other fields that need to be included.

        Note: In the transfer unit, the repo_id is accessed with obj.id for backwards compatability.

        :return: transfer object used in many plugin API calls
        :rtype:  pulp.plugins.model.Repository
        """
        r = plugin_repo(self.repo_id, self.display_name, self.description, self.notes,
                        content_unit_counts=self.content_unit_counts,
                        last_unit_added=self.last_unit_added,
                        last_unit_removed=self.last_unit_removed, repo_obj=self)
        return r

    def update_from_delta(self, repo_delta):
        """
        Update the repository's fields from a delta. Keys that are not fields will be ignored.

        :param repo_delta: key value pairs that represent the new values
        :type  repo_delta: dict
        """

        # Notes is done seperately to only change notes fields that are specified. If a notes
        # field is set to None, remove it.
        if 'notes' in repo_delta:
            for key, value in repo_delta.pop('notes').items():
                if value is None:
                    # pop with a default so removing an absent key is a no-op
                    # instead of raising KeyError
                    self.notes.pop(key, None)
                else:
                    self.notes[key] = value

        # These keys may not be changed.
        prohibited = ['content_unit_counts', 'repo_id', 'last_unit_added', 'last_unit_removed']
        for key, value in repo_delta.items():
            if key not in prohibited:
                setattr(self, key, value)
Example #10
0
class Importer(AutoRetryDocument):
    """
    Defines schema for an Importer in the `repo_importers` collection.

    Besides the database document, this model manages on-disk storage of the
    importer's TLS PEM material: on save, any CA cert, client cert, or client
    key found in ``config`` is written beneath the importer's local storage
    path for use by requests.
    """
    repo_id = StringField(required=True)
    importer_type_id = StringField(required=True)
    config = DictField()
    scratchpad = DictField(default=None)
    last_sync = ISO8601StringField()
    last_updated = UTCDateTimeField()
    last_override_config = DictField()

    # For backward compatibility
    _ns = StringField(default='repo_importers')
    SERIALIZER = serializers.ImporterSerializer

    meta = {'collection': 'repo_importers',
            'allow_inheritance': False,
            'indexes': [{'fields': ['-repo_id', '-importer_type_id'], 'unique': True}],
            'queryset_class': CriteriaQuerySet}

    @classmethod
    def pre_delete(cls, sender, document, **kwargs):
        """
        Purge the lazy catalog of all entries for the importer being deleted.

        :param sender:   class of sender (unused)
        :type  sender:   object
        :param document: mongoengine document being deleted.
        :type  document: pulp.server.db.model.Importer
        """
        query_set = LazyCatalogEntry.objects(importer_id=str(document.id))
        _logger.debug(_('Deleting lazy catalog entries for the {repo} repository.').format(
            repo=document.repo_id))
        query_set.delete()

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        """
        The signal that is triggered before importer is saved.

        :param sender:   class of sender (unused)
        :type sender:    object
        :param document: mongoengine document being saved
        :type document:  pulp.server.db.model.Importer
        """
        # Stamp the document with the current UTC time on every save.
        document.last_updated = dateutils.now_utc_datetime_with_tzinfo()

    def delete(self):
        """
        Delete the Importer. Remove any documents it has stored.
        """
        # Remove the on-disk storage (including pki files) before deleting
        # the database document.
        if os.path.exists(self._local_storage_path):
            shutil.rmtree(self._local_storage_path)
        super(Importer, self).delete()

    def save(self):
        """
        Save the Importer. Additionally, write any pki documents from its config into disk storage
        for use by requests.
        """
        super(Importer, self).save()
        # A map of Importer config key names to file paths for the TLS PEM settings.
        pem_keys_paths = (
            (importer_constants.KEY_SSL_CA_CERT, self.tls_ca_cert_path),
            (importer_constants.KEY_SSL_CLIENT_CERT, self.tls_client_cert_path),
            (importer_constants.KEY_SSL_CLIENT_KEY, self.tls_client_key_path))
        for key, path in pem_keys_paths:
            self._write_pem_file(key, path)

    @property
    def tls_ca_cert_path(self):
        """
        Return the path where the TLS CA certificate should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'ca.crt')

    @property
    def tls_client_cert_path(self):
        """
        Return the path where the TLS client certificate should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'client.crt')

    @property
    def tls_client_key_path(self):
        """
        Return the path where the TLS client key should be stored for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._pki_path, 'client.key')

    @property
    def _local_storage_path(self):
        """
        Return the path that the Importer should use for local storage.

        The directory name combines the repo id and importer type id, matching
        the unique index on those two fields.

        :rtype: basestring
        """
        return os.path.join(
            LOCAL_STORAGE, 'importers',
            '{repo}-{importer_type}'.format(repo=self.repo_id, importer_type=self.importer_type_id))

    @property
    def _pki_path(self):
        """
        Return the path that all pki files should be stored within for this Importer.

        :rtype: basestring
        """
        return os.path.join(self._local_storage_path, 'pki')

    def _write_pem_file(self, config_key, path):
        """
        Write the PEM data from self.config[config_key] to the given path, if the key is defined and
        is "truthy".

        :param config_key: The key corresponding to a value in self.config to write to path.
        :type  config_key: basestring
        :param path:       The path to write the PEM data to.
        :type  path:       basestring
        """
        if config_key in self.config and self.config[config_key]:
            # Restrictive permissions: directory 0700, key/cert files 0600,
            # since client keys are secrets.
            if not os.path.exists(self._pki_path):
                misc.mkdir(os.path.dirname(self._pki_path))
                os.mkdir(self._pki_path, 0700)
            with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, 0600), 'w') as pem_file:
                pem_file.write(self.config[config_key].encode('utf-8'))