    def test_publish_history_descending_sort(self):
        """
        Tests using the sort parameter to sort the results in descending order by start time.
        """

        # Setup
        self.repo_manager.create_repo("test_sort")
        self.distributor_manager.add_distributor("test_sort", "mock-distributor", {}, True, distributor_id="test_dist")
        # Create some consecutive publish entries
        date_string = "2013-06-01T12:00:0%sZ"
        for i in range(0, 10, 2):
            r = RepoPublishResult.expected_result(
                "test_sort",
                "test_dist",
                "bar",
                date_string % str(i),
                date_string % str(i + 1),
                "test-summary",
                "test-details",
                RepoPublishResult.RESULT_SUCCESS,
            )
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Test that returned entries are in descending order by time
        entries = self.publish_manager.publish_history("test_sort", "test_dist", sort=constants.SORT_DESCENDING)
        self.assertEqual(5, len(entries))
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]["started"])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]["started"])
            self.assertTrue(first > second)

    def test_publish_history_end_date(self):
        """
        Tests that the end_date parameter restricts the results to entries started on or
        before the given date.
        """

        # Setup
        self.repo_manager.create_repo("test_date")
        self.distributor_manager.add_distributor("test_date", "mock-distributor", {}, True, distributor_id="test_dist")
        # Create three consecutive publish entries
        date_string = "2013-06-01T12:00:0%sZ"
        for i in range(0, 6, 2):
            r = RepoPublishResult.expected_result(
                "test_date",
                "test_dist",
                "bar",
                date_string % str(i),
                date_string % str(i + 1),
                "test-summary",
                "test-details",
                RepoPublishResult.RESULT_SUCCESS,
            )
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Verify that all entries retrieved have dates prior to the given end date
        end_date = "2013-06-01T12:00:03Z"
        end_entries = self.publish_manager.publish_history("test_date", "test_dist", end_date=end_date)
        # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
        self.assertEqual(2, len(end_entries))
        for entry in end_entries:
            retrieved = dateutils.parse_iso8601_datetime(entry["started"])
            given_end = dateutils.parse_iso8601_datetime(end_date)
            self.assertTrue(retrieved <= given_end)
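These tests build history entries with RepoPublishResult.expected_result(...) and read them back by their 'started' field. For orientation, here is a rough, hypothetical sketch of the kind of document that call presumably assembles; only 'started' is confirmed by the assertions above, and the remaining field names are inferred from the argument order, so treat them as assumptions.

# Hypothetical sketch only -- not the real model code. Field names other than
# 'started' are guesses based on the positional arguments passed above.
def sketch_expected_result(repo_id, distributor_id, distributor_type_id,
                           started, completed, summary, details, result_code):
    return {
        'repo_id': repo_id,                          # e.g. 'test_date'
        'distributor_id': distributor_id,            # e.g. 'test_dist'
        'distributor_type_id': distributor_type_id,  # e.g. 'bar'
        'started': started,                          # ISO 8601 string; used for filtering and sorting
        'completed': completed,                      # ISO 8601 string
        'summary': summary,                          # e.g. 'test-summary'
        'details': details,                          # e.g. 'test-details'
        'result': result_code,                       # e.g. RepoPublishResult.RESULT_SUCCESS
    }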
Example #3
    def test_publish_history_start_date(self):
        """
        Tests that the start_date parameter restricts the results to entries started on or
        after the given date.
        """

        # Setup
        self.repo_manager.create_repo('test_date')
        self.distributor_manager.add_distributor('test_date', 'mock-distributor', {}, True,
                                                 distributor_id='test_dist')
        # Create three consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 6, 2):
            r = RepoPublishResult.expected_result(
                'test_date', 'test_dist', 'bar', date_string % str(i), date_string % str(i + 1),
                'test-summary', 'test-details', RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Verify
        self.assertEqual(3, len(self.publish_manager.publish_history('test_date', 'test_dist')))
        start_date = '2013-06-01T12:00:02Z'
        start_entries = self.publish_manager.publish_history('test_date', 'test_dist',
                                                             start_date=start_date)
        # Confirm the dates of the retrieved entries are later than or equal to the requested date
        self.assertEqual(2, len(start_entries))
        for entry in start_entries:
            retrieved = dateutils.parse_iso8601_datetime(entry['started'])
            given_start = dateutils.parse_iso8601_datetime(start_date)
            self.assertTrue(retrieved >= given_start)
Example #4
    def test_publish_history_end_date(self):
        """
        Tests that the end_date parameter restricts the results to entries started on or
        before the given date.
        """

        # Setup
        self.repo_manager.create_repo('test_date')
        self.distributor_manager.add_distributor('test_date',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='test_dist')
        # Create three consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 6, 2):
            r = RepoPublishResult.expected_result(
                'test_date', 'test_dist', 'bar', date_string % str(i),
                date_string % str(i + 1), 'test-summary', 'test-details',
                RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Verify that all entries retrieved have dates prior to the given end date
        end_date = '2013-06-01T12:00:03Z'
        end_entries = self.publish_manager.publish_history('test_date',
                                                           'test_dist',
                                                           end_date=end_date)
        # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
        self.assertEqual(2, len(end_entries))
        for entry in end_entries:
            retrieved = dateutils.parse_iso8601_datetime(entry['started'])
            given_end = dateutils.parse_iso8601_datetime(end_date)
            self.assertTrue(retrieved <= given_end)
Example #5
    def test_publish_history_ascending_sort(self):
        """
        Tests using the sort parameter to sort the results in ascending order by start time.
        """

        # Setup
        self.repo_manager.create_repo('test_sort')
        self.distributor_manager.add_distributor('test_sort', 'mock-distributor', {}, True,
                                                 distributor_id='test_dist')
        # Create some consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 10, 2):
            r = RepoPublishResult.expected_result(
                'test_sort', 'test_dist', 'bar', date_string % str(i), date_string % str(i + 1),
                'test-summary', 'test-details', RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Test that returned entries are in ascending order by time
        entries = self.publish_manager.publish_history('test_sort', 'test_dist',
                                                       sort=constants.SORT_ASCENDING)
        self.assertEqual(5, len(entries))
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first < second)
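For context on the sort parameter used above: however constants.SORT_ASCENDING and SORT_DESCENDING are defined upstream, the manager ultimately has to translate them into a pymongo sort direction on the 'started' field. A minimal sketch of that translation, relying on the imports already in scope in the example above; the document field names in the query follow the earlier sketch and are likewise assumptions, not the actual manager code.

# Sketch (assumption): mapping the sort constants onto pymongo sort directions.
import pymongo

_SORT_DIRECTION = {
    constants.SORT_ASCENDING: pymongo.ASCENDING,    # 1
    constants.SORT_DESCENDING: pymongo.DESCENDING,  # -1
}

def sorted_history(repo_id, dist_id, sort):
    cursor = RepoPublishResult.get_collection().find(
        {'repo_id': repo_id, 'distributor_id': dist_id})
    return list(cursor.sort('started', _SORT_DIRECTION[sort]))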
Example #6
    def test_publish_history_descending_sort(self):
        """
        Tests using the sort parameter to sort the results in descending order by start time.
        """

        # Setup
        self.repo_manager.create_repo('test_sort')
        self.distributor_manager.add_distributor('test_sort',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='test_dist')
        # Create some consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 10, 2):
            r = RepoPublishResult.expected_result(
                'test_sort', 'test_dist', 'bar', date_string % str(i),
                date_string % str(i + 1), 'test-summary', 'test-details',
                RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Test that returned entries are in descending order by time
        entries = self.publish_manager.publish_history(
            'test_sort', 'test_dist', sort=constants.SORT_DESCENDING)
        self.assertEqual(5, len(entries))
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i +
                                                              1]['started'])
            self.assertTrue(first > second)
Example #7
def add_result(repo_id, dist_id, offset):
    """Insert a successful publish result whose completed time is `offset` days after its started time."""
    started = dateutils.now_utc_datetime_with_tzinfo()
    completed = started + datetime.timedelta(days=offset)
    r = RepoPublishResult.expected_result(
        repo_id, dist_id, 'bar', dateutils.format_iso8601_datetime(started),
        dateutils.format_iso8601_datetime(completed), 'test-summary', 'test-details',
        RepoPublishResult.RESULT_SUCCESS)
    RepoPublishResult.get_collection().insert(r, safe=True)
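A hypothetical sketch of how this helper might be used alongside publish_history, inside the same TestCase as the history tests above. The repo id and the fixtures (self.repo_manager, self.distributor_manager, self.publish_manager) follow the earlier examples, but this test itself is illustrative, not taken from the source.

    # Illustrative only -- would live in the same TestCase as the history tests above.
    def test_publish_history_with_helper(self):
        self.repo_manager.create_repo('test_helper')
        self.distributor_manager.add_distributor('test_helper', 'mock-distributor', {}, True,
                                                 distributor_id='test_dist')
        # Three results whose completed times are offset by 0, 1 and 2 days.
        for offset in range(3):
            add_result('test_helper', 'test_dist', offset)
        entries = self.publish_manager.publish_history('test_helper', 'test_dist')
        self.assertEqual(3, len(entries))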
Example #8
def add_result(repo_id, dist_id, offset):
    started = dateutils.now_utc_datetime_with_tzinfo()
    completed = started + datetime.timedelta(days=offset)
    r = RepoPublishResult.expected_result(
        repo_id, dist_id, 'bar', dateutils.format_iso8601_datetime(started),
        dateutils.format_iso8601_datetime(completed), 'test-summary',
        'test-details', RepoPublishResult.RESULT_SUCCESS)
    RepoPublishResult.get_collection().insert(r, safe=True)
Example #9
def add_result(repo_id, dist_id, offset):
    started = datetime.datetime.now(dateutils.local_tz())
    completed = started + datetime.timedelta(days=offset)
    r = RepoPublishResult.expected_result(
        repo_id,
        dist_id,
        "bar",
        dateutils.format_iso8601_datetime(started),
        dateutils.format_iso8601_datetime(completed),
        "test-summary",
        "test-details",
        RepoPublishResult.RESULT_SUCCESS,
    )
    RepoPublishResult.get_collection().insert(r, safe=True)
Example #10
class RepoPublishManager(object):
    @staticmethod
    def publish(repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None

        :return: report of the details of the publish
        :rtype: pulp.server.db.model.repository.RepoPublishResult
        """
        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(repository=repo_id,
                                  distributor=distributor_id)

        distributor_instance, distributor_config = RepoPublishManager.\
            _get_distributor_instance_and_config(repo_id, distributor_id)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        call_config = PluginCallConfiguration(distributor_config,
                                              repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = RepoPublishManager._do_publish(repo, distributor_id,
                                                distributor_instance,
                                                transfer_repo, conduit,
                                                call_config)
        fire_manager.fire_repo_publish_finished(result)

        return result

    @staticmethod
    def _get_distributor_instance_and_config(repo_id, distributor_id):
        repo_distributor_manager = manager_factory.repo_distributor_manager()
        repo_distributor = repo_distributor_manager.get_distributor(
            repo_id, distributor_id)
        distributor, config = plugin_api.get_distributor_by_id(
            repo_distributor['distributor_type_id'])
        return distributor, config

    @staticmethod
    def _do_publish(repo, distributor_id, distributor_instance, transfer_repo,
                    conduit, call_config):

        distributor_coll = RepoDistributor.get_collection()
        publish_result_coll = RepoPublishResult.get_collection()
        repo_id = repo['id']

        # Perform the publish
        publish_start_timestamp = _now_timestamp()
        try:
            # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
            # respond to signals by calling the Distributor's cancel_publish_repo() method.
            publish_repo = register_sigterm_handler(
                distributor_instance.publish_repo,
                distributor_instance.cancel_publish_repo)
            publish_report = publish_repo(transfer_repo, conduit, call_config)
        except Exception, e:
            publish_end_timestamp = _now_timestamp()

            # Reload the distributor in case the scratchpad is set by the plugin
            repo_distributor = distributor_coll.find_one({
                'repo_id': repo_id,
                'id': distributor_id
            })
            repo_distributor['last_publish'] = publish_end_timestamp
            distributor_coll.save(repo_distributor, safe=True)

            # Add a publish history entry for the run
            result = RepoPublishResult.error_result(
                repo_id, repo_distributor['id'],
                repo_distributor['distributor_type_id'],
                publish_start_timestamp, publish_end_timestamp, e,
                sys.exc_info()[2])
            publish_result_coll.save(result, safe=True)

            logger.exception(
                _('Exception caught from plugin during publish for repo [%(r)s]'
                  % {'r': repo_id}))
            raise

        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        repo_distributor['last_publish'] = _now_timestamp()
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish entry
        if publish_report is not None and isinstance(publish_report,
                                                     PublishReport):
            summary = publish_report.summary
            details = publish_report.details
            if publish_report.success_flag:
                logger.debug(
                    'publish succeeded for repo [%s] with distributor ID [%s]'
                    % (repo_id, distributor_id))
                result_code = RepoPublishResult.RESULT_SUCCESS
            else:
                logger.info(
                    'publish failed for repo [%s] with distributor ID [%s]' %
                    (repo_id, distributor_id))
                logger.debug(
                    'summary for repo [%s] with distributor ID [%s]: %s' %
                    (repo_id, distributor_id, summary))
                result_code = RepoPublishResult.RESULT_FAILED
        else:
            msg = _(
                'Plugin type [%(type)s] on repo [%(repo)s] did not return a valid publish '
                'report')
            msg = msg % {
                'type': repo_distributor['distributor_type_id'],
                'repo': repo_id
            }
            logger.warn(msg)

            summary = details = _('Unknown')
            result_code = RepoPublishResult.RESULT_SUCCESS

        result = RepoPublishResult.expected_result(
            repo_id, repo_distributor['id'],
            repo_distributor['distributor_type_id'], publish_start_timestamp,
            publish_end_timestamp, summary, details, result_code)
        publish_result_coll.save(result, safe=True)
        return result
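Per the docstring, publish() runs synchronously in the caller's thread and returns the RepoPublishResult for the run. A minimal caller sketch; the repo and distributor ids and the override key are made up for illustration, and MissingResource is assumed to be imported as in the module above.

# Illustrative caller (ids and the override key are hypothetical).
# publish() blocks until the distributor finishes, so long publishes should not
# be driven from a request thread.
try:
    result = RepoPublishManager.publish('example-repo', 'example-distributor',
                                        publish_config_override={'http': False})
except MissingResource:
    # Raised when the repo or the distributor association does not exist.
    result = None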
Example #11
class RepoPublishManager(object):
    def publish(self, repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(repository=repo_id,
                                  distributor=distributor_id)

        distributor_instance, distributor_config = self._get_distributor_instance_and_config(
            repo_id, distributor_id)

        if distributor_instance is None:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(
            distributor_instance.cancel_publish_repo)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        call_config = PluginCallConfiguration(distributor_config,
                                              repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = self._do_publish(repo, distributor_id, distributor_instance,
                                  transfer_repo, conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)

        dispatch_context.clear_cancel_control_hook()

    def _get_distributor_instance_and_config(self, repo_id, distributor_id):
        repo_distributor_manager = manager_factory.repo_distributor_manager()
        try:
            repo_distributor = repo_distributor_manager.get_distributor(
                repo_id, distributor_id)
            distributor, config = plugin_api.get_distributor_by_id(
                repo_distributor['distributor_type_id'])
        except (MissingResource, plugin_exceptions.PluginNotFound):
            distributor = None
            config = None
        return distributor, config

    def _do_publish(self, repo, distributor_id, distributor_instance,
                    transfer_repo, conduit, call_config):

        distributor_coll = RepoDistributor.get_collection()
        publish_result_coll = RepoPublishResult.get_collection()
        repo_id = repo['id']

        # Perform the publish
        publish_start_timestamp = _now_timestamp()
        try:
            publish_report = distributor_instance.publish_repo(
                transfer_repo, conduit, call_config)
        except Exception, e:
            publish_end_timestamp = _now_timestamp()

            # Reload the distributor in case the scratchpad is set by the plugin
            repo_distributor = distributor_coll.find_one({
                'repo_id': repo_id,
                'id': distributor_id
            })
            repo_distributor['last_publish'] = publish_end_timestamp
            distributor_coll.save(repo_distributor, safe=True)

            # Add a publish history entry for the run
            result = RepoPublishResult.error_result(
                repo_id, repo_distributor['id'],
                repo_distributor['distributor_type_id'],
                publish_start_timestamp, publish_end_timestamp, e,
                sys.exc_info()[2])
            publish_result_coll.save(result, safe=True)

            _LOG.exception(
                _('Exception caught from plugin during publish for repo [%(r)s]'
                  % {'r': repo_id}))
            raise PulpExecutionException(), None, sys.exc_info()[2]

        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        repo_distributor['last_publish'] = _now_timestamp()
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish entry
        if publish_report is not None and isinstance(publish_report,
                                                     PublishReport):
            summary = publish_report.summary
            details = publish_report.details
            if publish_report.success_flag:
                result_code = RepoPublishResult.RESULT_SUCCESS
            else:
                result_code = RepoPublishResult.RESULT_FAILED
        else:
            _LOG.warn(
                'Plugin type [%s] on repo [%s] did not return a valid publish report'
                % (repo_distributor['distributor_type_id'], repo_id))

            summary = details = _('Unknown')
            result_code = RepoPublishResult.RESULT_SUCCESS

        result = RepoPublishResult.expected_result(
            repo_id, repo_distributor['id'],
            repo_distributor['distributor_type_id'], publish_start_timestamp,
            publish_end_timestamp, summary, details, result_code)
        publish_result_coll.save(result, safe=True)
        return result
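Both manager variants call distributor_instance.publish_repo(transfer_repo, conduit, call_config), expect a PublishReport back (checked with isinstance and read via success_flag, summary and details), and use cancel_publish_repo() as the cancel hook. A bare-bones distributor sketch showing just those two methods; the PublishReport(success_flag, summary, details) constructor order is an assumption inferred from how the managers read the report.

# Minimal distributor sketch -- only the two methods the managers above call.
# PublishReport(success_flag, summary, details) is assumed, not confirmed here.
class SketchDistributor(object):

    def __init__(self):
        self._canceled = False

    def publish_repo(self, transfer_repo, publish_conduit, config):
        if self._canceled:
            return PublishReport(False, 'publish canceled', {})
        # ... write the repository content out here ...
        return PublishReport(True, 'publish finished', {'units_published': 0})

    def cancel_publish_repo(self, *args, **kwargs):
        # Invoked via the SIGTERM handler / cancel control hook set up above.
        self._canceled = True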
Example #12
    publish_end_timestamp = _now_timestamp()

    # Reload the distributor in case the scratchpad is set by the plugin
    repo_distributor = distributor_coll.find_one({'repo_id': repo_obj.repo_id, 'id': dist_id})
    repo_distributor['last_publish'] = datetime.utcnow()
    distributor_coll.save(repo_distributor, safe=True)

    # Add a publish entry
    summary = publish_report.summary
    details = publish_report.details
    _logger.debug('publish succeeded for repo [%s] with distributor ID [%s]' % (
                  repo_obj.repo_id, dist_id))
    result_code = RepoPublishResult.RESULT_SUCCESS
    result = RepoPublishResult.expected_result(
        repo_obj.repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
        publish_start_timestamp, publish_end_timestamp, summary, details, result_code)
    publish_result_coll.save(result, safe=True)
    return result


def publish_history(start_date, end_date, repo_id, distributor_id):
    """
    Returns a cursor containing the publish history entries for the given repo and distributor.

    :param start_date: if specified, no events prior to this date will be returned.
    :type  start_date: iso8601 datetime string
    :param end_date: if specified, no events after this date will be returned.
    :type  end_date: iso8601 datetime string
    :param repo_id: identifies the repo
    :type  repo_id: str
    :param distributor_id: identifies the distributor
    :type  distributor_id: str
    """
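The excerpt cuts off before the body. A sketch of what the query plausibly looks like, assuming the entries are filtered and sorted on their 'started' ISO 8601 strings with pymongo and stored under the field names sketched earlier; the upstream implementation may differ.

    # Sketch of a possible body (assumption, not the upstream implementation):
    # build a Mongo spec from the optional date bounds, then sort on 'started'.
    search_params = {'repo_id': repo_id, 'distributor_id': distributor_id}
    date_range = {}
    if start_date:
        date_range['$gte'] = start_date
    if end_date:
        date_range['$lte'] = end_date
    if date_range:
        search_params['started'] = date_range
    return RepoPublishResult.get_collection().find(search_params).sort('started', -1)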
Example #13
    # Reload the distributor in case the scratchpad is set by the plugin
    repo_distributor = distributor_coll.find_one({
        'repo_id': repo_obj.repo_id,
        'id': dist_id
    })
    repo_distributor['last_publish'] = datetime.utcnow()
    distributor_coll.save(repo_distributor, safe=True)

    # Add a publish entry
    summary = publish_report.summary
    details = publish_report.details
    _logger.debug('publish succeeded for repo [%s] with distributor ID [%s]' %
                  (repo_obj.repo_id, dist_id))
    result_code = RepoPublishResult.RESULT_SUCCESS
    result = RepoPublishResult.expected_result(
        repo_obj.repo_id, repo_distributor['id'],
        repo_distributor['distributor_type_id'], publish_start_timestamp,
        publish_end_timestamp, summary, details, result_code)
    publish_result_coll.save(result, safe=True)
    return result


def publish_history(start_date, end_date, repo_id, distributor_id):
    """
    Returns a cursor containing the publish history entries for the given repo and distributor.

    :param start_date: if specified, no events prior to this date will be returned.
    :type  start_date: iso8601 datetime string
    :param end_date: if specified, no events after this date will be returned.
    :type  end_date: iso8601 datetime string
    :param repo_id: identifies the repo
    :type  repo_id: str