def test_importer_config_to_nectar_config_complete(self):
    """Every supported importer key must be mapped onto the DownloaderConfig."""
    # Setup
    importer_config = {
        constants.KEY_SSL_CA_CERT: 'ca_cert',
        constants.KEY_SSL_VALIDATION: True,
        constants.KEY_SSL_CLIENT_CERT: 'client_cert',
        constants.KEY_SSL_CLIENT_KEY: 'client_key',
        constants.KEY_PROXY_HOST: 'proxy',
        constants.KEY_PROXY_PORT: 8080,
        # BUG FIX: these two values were placeholder strings ('******') that
        # did not match the assertions below, so the test could never pass.
        constants.KEY_PROXY_USER: 'user',
        constants.KEY_PROXY_PASS: 'pass',
        constants.KEY_MAX_DOWNLOADS: 10,
        constants.KEY_MAX_SPEED: 1024,
    }

    # Test
    download_config = nectar_config.importer_config_to_nectar_config(
        importer_config)

    # Verify
    self.assertTrue(isinstance(download_config, DownloaderConfig))
    self.assertEqual(download_config.ssl_ca_cert, 'ca_cert')
    self.assertEqual(download_config.ssl_validation, True)
    self.assertEqual(download_config.ssl_client_cert, 'client_cert')
    self.assertEqual(download_config.ssl_client_key, 'client_key')
    self.assertEqual(download_config.proxy_url, 'proxy')
    self.assertEqual(download_config.proxy_port, 8080)
    self.assertEqual(download_config.proxy_username, 'user')
    self.assertEqual(download_config.proxy_password, 'pass')
    self.assertEqual(download_config.max_concurrent, 10)
    self.assertEqual(download_config.max_speed, 1024)
def test_importer_config_to_nectar_config_complete(self):
    """Every supported importer key must be mapped onto the DownloaderConfig."""
    # Setup
    importer_config = {
        constants.KEY_SSL_CA_CERT: 'ca_cert',
        constants.KEY_SSL_VALIDATION: True,
        constants.KEY_SSL_CLIENT_CERT: 'client_cert',
        constants.KEY_SSL_CLIENT_KEY: 'client_key',
        constants.KEY_PROXY_HOST: 'proxy',
        constants.KEY_PROXY_PORT: 8080,
        # BUG FIX: these two values were placeholder strings ('******') that
        # did not match the assertions below, so the test could never pass.
        constants.KEY_PROXY_USER: 'user',
        constants.KEY_PROXY_PASS: 'pass',
        constants.KEY_MAX_DOWNLOADS: 10,
        constants.KEY_MAX_SPEED: 1024,
    }

    # Test
    download_config = nectar_config.importer_config_to_nectar_config(importer_config)

    # Verify
    self.assertTrue(isinstance(download_config, DownloaderConfig))
    self.assertEqual(download_config.ssl_ca_cert, 'ca_cert')
    self.assertEqual(download_config.ssl_validation, True)
    self.assertEqual(download_config.ssl_client_cert, 'client_cert')
    self.assertEqual(download_config.ssl_client_key, 'client_key')
    self.assertEqual(download_config.proxy_url, 'proxy')
    self.assertEqual(download_config.proxy_port, 8080)
    self.assertEqual(download_config.proxy_username, 'user')
    self.assertEqual(download_config.proxy_password, 'pass')
    self.assertEqual(download_config.max_concurrent, 10)
    self.assertEqual(download_config.max_speed, 1024)
def _download(self, urls):
    """
    Fetch a collection of files by URL.

    Hides the nectar plumbing behind a single call and logs the outcome
    of each transfer.

    :param urls: A list of tuples: (url, destination). Both elements are
        strings; *destination* is the fully qualified path the file is
        downloaded to.
    :type urls: list
    :return: The nectar reports as (succeeded_reports, failed_reports).
    :rtype: tuple
    """
    feed = self.feed_url()
    conf = importer_config_to_nectar_config(self.config.flatten())
    # Pick the downloader implementation based on the feed's URL scheme.
    downloader_class = URL_TO_DOWNLOADER[urlparse(feed).scheme]
    downloader = downloader_class(conf)
    listener = DownloadListener(self, downloader)

    requests = [DownloadRequest(url, destination) for url, destination in urls]
    downloader.download(requests)
    conf.finalize()

    for report in listener.succeeded_reports:
        _logger.info(FETCH_SUCCEEDED, dict(url=report.url, dst=report.destination))
    for report in listener.failed_reports:
        _logger.error(FETCH_FAILED, dict(url=report.url, msg=report.error_msg))

    return listener.succeeded_reports, listener.failed_reports
def __init__(self, repo, sync_conduit, call_config):
    """
    :param repo: metadata describing the repository
    :type repo: pulp.plugins.model.Repository
    :param sync_conduit: provides access to relevant Pulp functionality
    :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param call_config: plugin configuration
    :type call_config: pulp.plugins.config.PluginCallConfiguration
    """
    self.cancelled = False
    self.working_dir = repo.working_dir

    # Per-phase progress reporting; content and distribution get rich
    # report objects, the remaining phases start as plain state dicts.
    self.content_report = ContentReport()
    self.distribution_report = DistributionReport()
    self.progress_status = {
        'content': self.content_report,
        'distribution': self.distribution_report,
    }
    for phase in ('metadata', 'errata', 'comps'):
        self.progress_status[phase] = {'state': 'NOT_STARTED'}

    self.sync_conduit = sync_conduit
    self.set_progress()

    self.repo = repo
    self.call_config = call_config
    # Translate the importer configuration into a nectar download config.
    self.nectar_config = nectar_utils.importer_config_to_nectar_config(
        call_config.flatten())
def __init__(self, repo=None, conduit=None, config=None):
    """
    Initialize the SyncStep.

    The config is validated first, then a nectar download config is built
    from it and the feed is probed for the Docker v2 and/or v1 registry
    APIs. Child steps are added for whichever API versions are both enabled
    in the config and actually found on the remote registry.

    :param repo: repository to sync
    :type repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(SyncStep, self).__init__(
        step_type=constants.SYNC_STEP_MAIN, repo=repo, conduit=conduit, config=config,
        plugin_type=constants.IMPORTER_TYPE_ID)
    self.description = _('Syncing Docker Repository')

    self._validate(config)
    dl_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    remote_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    feed = config.get(importer_constants.KEY_FEED)

    # The DownloadMetadataSteps will set these to a list of Manifests and Blobs
    self.available_manifests = []
    self.available_blobs = []

    # Unit keys, populated by v1_sync.GetMetadataStep
    self.v1_available_units = []
    # populated by v1_sync.GetMetadataStep
    self.v1_tags = {}

    # Repository objects used to talk to the remote registry.
    self.index_repository = registry.V2Repository(
        remote_name, dl_config, feed, self.get_working_dir())
    self.v1_index_repository = registry.V1Repository(
        remote_name, dl_config, feed, self.get_working_dir())

    # Determine which API versions are supported and add corresponding steps.
    v2_enabled = config.get(constants.CONFIG_KEY_ENABLE_V2, default=True)
    v1_enabled = config.get(constants.CONFIG_KEY_ENABLE_V1, default=False)
    if not v2_enabled:
        _logger.debug(_('v2 API skipped due to config'))
    if not v1_enabled:
        _logger.debug(_('v1 API skipped due to config'))
    v2_found = v2_enabled and self.index_repository.api_version_check()
    v1_found = v1_enabled and self.v1_index_repository.api_version_check()

    if v2_found:
        _logger.debug(_('v2 API found'))
        self.add_v2_steps(repo, conduit, config)
    if v1_found:
        _logger.debug(_('v1 API found'))
        self.add_v1_steps(repo, config)
    if not any((v1_found, v2_found)):
        raise PulpCodedException(error_code=error_codes.DKR1008, registry=feed)
def __init__(self, repo, conduit, config):
    """
    :param repo: the repository to sync
    :type repo: pulp.server.db.model.Repository
    :param conduit: provides access to relevant Pulp functionality
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    self.cancelled = False
    self.working_dir = common_utils.get_working_directory()

    # Per-phase progress reporting; content and distribution get rich
    # report objects, the remaining phases start as plain state dicts.
    self.content_report = ContentReport()
    self.distribution_report = DistributionReport()
    self.progress_report = {
        'content': self.content_report,
        'distribution': self.distribution_report,
    }
    for phase in ('metadata', 'errata', 'comps', 'purge_duplicates'):
        self.progress_report[phase] = {'state': 'NOT_STARTED'}

    self.conduit = conduit
    self.set_progress()

    self.repo = repo
    self.config = config
    self.nectar_config = nectar_utils.importer_config_to_nectar_config(
        config.flatten())
    self.skip_repomd_steps = False
    self.current_revision = 0
    self.downloader = None
    self.tmp_dir = None

    url_modify_config = {}
    if config.get('query_auth_token'):
        url_modify_config['query_auth_token'] = config.get('query_auth_token')
        # Some unit types cannot be fetched with a query auth token; force
        # them into the skip list so the sync does not attempt them.
        skip_config = self.config.get(constants.CONFIG_SKIP, [])
        for type_id in ids.QUERY_AUTH_TOKEN_UNSUPPORTED:
            if type_id not in skip_config:
                skip_config.append(type_id)
        self.config.override_config[constants.CONFIG_SKIP] = skip_config
        _logger.info(
            _('The following unit types do not support query auth tokens and will be skipped:'
              ' {skipped_types}').format(
                skipped_types=ids.QUERY_AUTH_TOKEN_UNSUPPORTED))
    self._url_modify = RepoURLModifier(**url_modify_config)
def __init__(self, repo=None, conduit=None, config=None, working_dir=None):
    """
    Initialize the SyncStep.

    The config is validated first, a nectar download config is built from
    it, and the feed URL is checked to be a Docker v2 registry. If it is,
    child steps appropriate for a v2 sync are added; if it is not, a
    NotImplementedError is raised by the API version check.

    :param repo: repository to sync
    :type repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param working_dir: full path to the directory in which transient files
                        should be stored before being moved into long-term
                        storage. This should be deleted by the caller after
                        step processing is complete.
    :type working_dir: basestring
    """
    super(SyncStep, self).__init__(constants.SYNC_STEP_MAIN, repo, conduit, config,
                                   working_dir, constants.IMPORTER_TYPE_ID)
    self.description = _('Syncing Docker Repository')

    self._validate(config)
    dl_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    remote_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    feed = config.get(importer_constants.KEY_FEED)

    # The GetMetadataStep will set this to a list of dictionaries of the
    # form {'digest': digest}.
    self.available_units = []

    # Repository object used to talk to the remote registry. The version
    # check below raises NotImplementedError when the feed is not a Docker
    # v2 registry.
    self.index_repository = registry.V2Repository(remote_name, dl_config, feed, working_dir)
    self.index_repository.api_version_check()

    self.step_get_metadata = GetMetadataStep(repo=repo, conduit=conduit, config=config,
                                             working_dir=working_dir)
    self.add_child(self.step_get_metadata)
    # Keep a reference so its "units_to_download" attribute can be read later.
    self.step_get_local_units = GetLocalBlobsStep(constants.IMPORTER_TYPE_ID)
    self.add_child(self.step_get_local_units)
    self.add_child(
        DownloadStep(
            constants.SYNC_STEP_DOWNLOAD,
            downloads=self.generate_download_requests(),
            repo=self.repo,
            config=self.config,
            working_dir=self.working_dir,
            description=_('Downloading remote files')))
    self.add_child(SaveUnitsStep(self.working_dir))
def __init__(self, repo, conduit, config):
    """
    :param repo: the repository to sync
    :type repo: pulp.server.db.model.Repository
    :param conduit: provides access to relevant Pulp functionality
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    self.working_dir = common_utils.get_working_directory()

    # Per-phase progress reporting; content and distribution get rich
    # report objects, the remaining phases start as plain state dicts.
    self.content_report = ContentReport()
    self.distribution_report = DistributionReport()
    self.progress_report = {
        'content': self.content_report,
        'distribution': self.distribution_report,
    }
    for phase in ('metadata', 'errata', 'comps', 'purge_duplicates'):
        self.progress_report[phase] = {'state': 'NOT_STARTED'}

    self.conduit = conduit
    self.set_progress()

    self.repo = repo
    self.config = config
    self.nectar_config = nectar_utils.importer_config_to_nectar_config(config.flatten())
    self.skip_repomd_steps = False
    self.current_revision = 0
    self.downloader = None
    self.tmp_dir = None

    # Was any repo metadata found? Includes either yum metadata or a treeinfo
    # file. If this is False at the end of the sync, then an error will be
    # presented to the user.
    self.metadata_found = False
    # Store the reason that yum repo metadata was not found. In case a
    # treeinfo file is also not found, this error will be the one presented
    # to the user. That preserves pre-existing behavior that is yum-centric.
    self.repomd_not_found_reason = ''

    url_modify_config = {}
    if config.get('query_auth_token'):
        url_modify_config['query_auth_token'] = config.get('query_auth_token')
        # Some unit types cannot be fetched with a query auth token; force
        # them into the skip list so the sync does not attempt them.
        skip_config = self.config.get(constants.CONFIG_SKIP, [])
        for type_id in ids.QUERY_AUTH_TOKEN_UNSUPPORTED:
            if type_id not in skip_config:
                skip_config.append(type_id)
        self.config.override_config[constants.CONFIG_SKIP] = skip_config
        _logger.info(
            _('The following unit types do not support query auth tokens and will be skipped:'
              ' {skipped_types}').format(skipped_types=ids.QUERY_AUTH_TOKEN_UNSUPPORTED)
        )
    self._url_modify = RepoURLModifier(**url_modify_config)
def __init__(self, repo=None, conduit=None, config=None):
    """
    Initialize the SyncStep.

    The config is validated first, a nectar download config is built from
    it, and the feed URL is checked to be a Docker v2 registry. If it is,
    child steps appropriate for a v2 sync are added; if it is not, a
    NotImplementedError is raised by the API version check.

    :param repo: repository to sync
    :type repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(SyncStep, self).__init__(
        step_type=constants.SYNC_STEP_MAIN, repo=repo, conduit=conduit, config=config,
        plugin_type=constants.IMPORTER_TYPE_ID)
    self.description = _('Syncing Docker Repository')

    self._validate(config)
    dl_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    remote_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    feed = config.get(importer_constants.KEY_FEED)

    # The DownloadMetadataSteps will set these to a list of Manifests and Blobs
    self.available_manifests = []
    self.available_blobs = []

    # Repository object used to talk to the remote registry. The version
    # check below raises NotImplementedError when the feed is not a Docker
    # v2 registry.
    self.index_repository = registry.V2Repository(
        remote_name, dl_config, feed, self.get_working_dir())
    self.index_repository.api_version_check()

    self.add_child(DownloadManifestsStep(repo=repo, conduit=conduit, config=config))

    # Keep references to these steps so their "units_to_download" attributes
    # can be read later. They are separate steps because all the Manifests
    # are already downloaded (only the new ones should be saved), while the
    # missing Blobs still have to be fetched — so the two unit kinds must be
    # handled separately.
    self.step_get_local_manifests = publish_step.GetLocalUnitsStep(
        importer_type=constants.IMPORTER_TYPE_ID, available_units=self.available_manifests)
    self.step_get_local_blobs = publish_step.GetLocalUnitsStep(
        importer_type=constants.IMPORTER_TYPE_ID, available_units=self.available_blobs)
    self.add_child(self.step_get_local_manifests)
    self.add_child(self.step_get_local_blobs)
    self.add_child(
        publish_step.DownloadStep(
            step_type=constants.SYNC_STEP_DOWNLOAD,
            downloads=self.generate_download_requests(),
            repo=self.repo,
            config=self.config,
            description=_('Downloading remote files')))
    self.add_child(SaveUnitsStep())
def _downloader(self, config):
    """
    Build a configured downloader.

    This is where the importer configuration is wired into the download
    package. The https downloader may be used for both http and https, so
    it is always chosen for simplicity.

    :param config: The importer configuration.
    :type config: pulp.plugins.config.PluginCallConfiguration
    :return: A configured downloader
    :rtype: nectar.downloaders.base.Downloader
    """
    return Downloader(importer_config_to_nectar_config(config.flatten()))
def __init__(self, repo=None, conduit=None, config=None, working_dir=None):
    """
    Initialize the SyncStep and assemble its child steps.

    :param repo: repository to sync
    :type repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param working_dir: full path to the directory in which transient files
                        should be stored before being moved into long-term
                        storage. This should be deleted by the caller after
                        step processing is complete.
    :type working_dir: basestring
    """
    super(SyncStep, self).__init__(constants.SYNC_STEP_MAIN, repo, conduit, config,
                                   working_dir, constants.IMPORTER_TYPE_ID)
    self.description = _('Syncing Docker Repository')

    # Unit keys, populated by GetMetadataStep
    self.available_units = []
    # populated by GetMetadataStep
    self.tags = {}

    self.validate(config)
    dl_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    remote_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    feed = config.get(importer_constants.KEY_FEED)

    # Repository object used to talk to the remote registry.
    self.index_repository = Repository(remote_name, dl_config, feed, working_dir)

    self.add_child(GetMetadataStep(working_dir=working_dir))
    # Keep a reference so its "units_to_download" attribute can be read later.
    self.step_get_local_units = GetLocalImagesStep(
        constants.IMPORTER_TYPE_ID, constants.IMAGE_TYPE_ID, ['image_id'], working_dir)
    self.add_child(self.step_get_local_units)
    self.add_child(
        DownloadStep(constants.SYNC_STEP_DOWNLOAD,
                     downloads=self.generate_download_requests(),
                     repo=repo,
                     config=config,
                     working_dir=working_dir,
                     description=_('Downloading remote files')))
    self.add_child(SaveUnits(working_dir))
def __init__(self, repo, sync_conduit, call_config):
    """
    :param repo: the repository to sync
    :type repo: pulp.server.db.model.Repository
    :param sync_conduit: provides access to relevant Pulp functionality
    :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param call_config: plugin configuration
    :type call_config: pulp.plugins.config.PluginCallConfiguration
    """
    self.cancelled = False
    self.working_dir = common_utils.get_working_directory()

    # Per-phase progress reporting; content and distribution get rich
    # report objects, the remaining phases start as plain state dicts.
    self.content_report = ContentReport()
    self.distribution_report = DistributionReport()
    self.progress_status = {
        'content': self.content_report,
        'distribution': self.distribution_report,
    }
    for phase in ('metadata', 'errata', 'comps'):
        self.progress_status[phase] = {'state': 'NOT_STARTED'}

    self.sync_conduit = sync_conduit
    self.set_progress()

    self.repo = repo
    self.call_config = call_config
    self.nectar_config = nectar_utils.importer_config_to_nectar_config(
        call_config.flatten())
    self.skip_repomd_steps = False
    self.current_revision = 0

    url_modify_config = {}
    if call_config.get('query_auth_token'):
        url_modify_config['query_auth_token'] = call_config.get('query_auth_token')
        # Some unit types cannot be fetched with a query auth token; force
        # them into the skip list so the sync does not attempt them.
        skip_config = self.call_config.get(constants.CONFIG_SKIP, [])
        for type_id in ids.QUERY_AUTH_TOKEN_UNSUPPORTED:
            if type_id not in skip_config:
                skip_config.append(type_id)
        self.call_config.override_config[constants.CONFIG_SKIP] = skip_config
        _logger.info(
            _('The following unit types do not support query auth tokens and will be skipped:'
              ' {skipped_types}').format(skipped_types=ids.QUERY_AUTH_TOKEN_UNSUPPORTED)
        )
    self._url_modify = utils.RepoURLModifier(**url_modify_config)
def test_importer_config_to_download_config_partial(self):
    """Keys present in the importer config map over; absent ones default to None."""
    # Setup
    importer_config = {
        constants.KEY_SSL_CA_CERT: 'ca_cert',
        constants.KEY_PROXY_HOST: 'proxy',
        constants.KEY_MAX_DOWNLOADS: 10,
    }

    # Test
    download_config = nectar_config.importer_config_to_nectar_config(importer_config)

    # Verify
    expectations = (
        ('ssl_ca_cert', 'ca_cert'),
        ('proxy_url', 'proxy'),
        ('max_concurrent', 10),
        ('proxy_username', None),  # spot check an unset key
    )
    for attribute, expected in expectations:
        self.assertEqual(getattr(download_config, attribute), expected)
def test_importer_config_to_download_config_partial(self):
    """Keys present in the importer config map over; absent ones default to None."""
    # Setup
    importer_config = {
        constants.KEY_SSL_CA_CERT: 'ca_cert',
        constants.KEY_PROXY_HOST: 'proxy',
        constants.KEY_MAX_DOWNLOADS: 10,
    }

    # Test
    download_config = nectar_config.importer_config_to_nectar_config(importer_config)

    # Verify
    expectations = (
        ('ssl_ca_cert', 'ca_cert'),
        ('proxy_url', 'proxy'),
        ('max_concurrent', 10),
        ('proxy_username', None),  # spot check an unset key
    )
    for attribute, expected in expectations:
        self.assertEqual(getattr(download_config, attribute), expected)
def __init__(self, repo=None, conduit=None, config=None, working_dir=None):
    """
    Initialize the SyncStep and assemble its child steps.

    :param repo: repository to sync
    :type repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param working_dir: full path to the directory in which transient files
                        should be stored before being moved into long-term
                        storage. This should be deleted by the caller after
                        step processing is complete.
    :type working_dir: basestring
    """
    super(SyncStep, self).__init__(constants.SYNC_STEP_MAIN, repo, conduit, config,
                                   working_dir, constants.IMPORTER_TYPE_ID)
    self.description = _('Syncing Docker Repository')

    # Unit keys, populated by GetMetadataStep
    self.available_units = []
    # populated by GetMetadataStep
    self.tags = {}

    self.validate(config)
    dl_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    remote_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    feed = config.get(importer_constants.KEY_FEED)

    # Repository object used to talk to the remote registry.
    self.index_repository = Repository(remote_name, dl_config, feed, working_dir)

    self.add_child(GetMetadataStep(working_dir=working_dir))
    # Keep a reference so its "units_to_download" attribute can be read later.
    self.step_get_local_units = GetLocalImagesStep(
        constants.IMPORTER_TYPE_ID, constants.IMAGE_TYPE_ID, ['image_id'], working_dir)
    self.add_child(self.step_get_local_units)
    self.add_child(
        DownloadStep(constants.SYNC_STEP_DOWNLOAD,
                     downloads=self.generate_download_requests(),
                     repo=repo,
                     config=config,
                     working_dir=working_dir,
                     description=_('Downloading remote files')))
    self.add_child(SaveUnits(working_dir))
def get_downloader(config, url, working_dir=None, **options):
    """
    DEPRECATED. Use get_downloader_for_db_importer instead.

    Build a configured downloader for the given URL.

    :param config: A plugin configuration.
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param url: A URL.
    :type url: str
    :param working_dir: Allow the caller to override the working directory used.
    :type working_dir: str
    :param options: Extended configuration.
    :type options: dict
    :return: A configured downloader.
    :rtype: nectar.downloaders.base.Downloader
    :raise ValueError: when the URL scheme is not supported.
    """
    dl_config = importer_config_to_nectar_config(config.flatten(),
                                                 working_dir=working_dir)
    return Importer.build_downloader(url, dl_config)
def retrieve_metadata(self, progress_report):
    """
    Fetch every metadata document needed to satisfy the repository config.

    The progress report is updated as the download takes place.

    :param progress_report: used to communicate the progress of this operation
    :type progress_report: pulp_puppet.importer.sync_progress.ProgressReport
    :return: list of JSON documents describing all modules to import
    :rtype: list
    :raises FileRetrievalException: if the metadata file cannot be retrieved
    """
    feed = self.config.get(constants.CONFIG_FEED)
    source_dir = feed[len('file://'):]
    metadata_filename = os.path.join(source_dir, constants.REPO_METADATA_FILENAME)

    # This implementation issues exactly one query.
    progress_report.metadata_query_finished_count = 0
    progress_report.metadata_query_total_count = 1
    progress_report.metadata_current_query = metadata_filename
    progress_report.update_progress()

    nectar_conf = importer_config_to_nectar_config(self.config.flatten())
    listener = LocalMetadataDownloadEventListener(progress_report)
    self.downloader = LocalFileDownloader(nectar_conf, listener)

    destination = StringIO()
    request = DownloadRequest(os.path.join(feed, constants.REPO_METADATA_FILENAME),
                              destination)
    self.downloader.download([request])
    nectar_conf.finalize()
    self.downloader = None

    for report in listener.failed_reports:
        raise FileRetrievalException(report.error_msg)

    return [destination.getvalue()]
def initialize(self):
    """
    Set up the nectar downloader.

    Originally based on the ISO sync setup.
    """
    config = self.get_config()
    self._validate_downloads = config.get(importer_constants.KEY_VALIDATE, default=True)
    self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
    # The URL must end in a trailing slash because urljoin is used to
    # determine paths later.
    if self._repo_url[-1] != '/':
        self._repo_url += '/'

    dl_config = importer_config_to_nectar_config(config.flatten())
    # Pass self as the event listener so the callbacks are received in
    # this class.
    if self._repo_url.lower().startswith('file'):
        self.downloader = LocalFileDownloader(dl_config, self)
    else:
        self.downloader = HTTPThreadedDownloader(dl_config, self)
def get_downloader(config, url, **options):
    """
    Build a configured downloader appropriate for the URL's scheme.

    :param config: A plugin configuration.
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param url: A URL.
    :type url: str
    :param options: Extended configuration.
    :type options: dict
    :return: A configured downloader.
    :rtype: nectar.downloaders.base.Downloader
    :raise ValueError: when the URL scheme is not supported.
    """
    parsed = urlparse(url)
    dl_config = importer_config_to_nectar_config(config.flatten())
    scheme = parsed.scheme.lower()
    if scheme == 'file':
        return LocalFileDownloader(dl_config)
    if scheme in ('http', 'https'):
        return HTTPThreadedDownloader(dl_config)
    raise ValueError(_('Scheme "{s}" not supported').format(s=parsed.scheme))
def __init__(self, repo, sync_conduit, call_config):
    """
    :param repo: metadata describing the repository
    :type repo: pulp.plugins.model.Repository
    :param sync_conduit: provides access to relevant Pulp functionality
    :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param call_config: plugin configuration
    :type call_config: pulp.plugins.config.PluginCallConfiguration
    """
    self.cancelled = False
    self.working_dir = repo.working_dir

    # Per-phase progress reporting; content and distribution get rich
    # report objects, the remaining phases start as plain state dicts.
    self.content_report = ContentReport()
    self.distribution_report = DistributionReport()
    self.progress_status = {
        'content': self.content_report,
        'distribution': self.distribution_report,
    }
    for phase in ('metadata', 'errata', 'comps'):
        self.progress_status[phase] = {'state': 'NOT_STARTED'}

    self.sync_conduit = sync_conduit
    self.set_progress()

    self.repo = repo
    self.call_config = call_config
    self.nectar_config = nectar_utils.importer_config_to_nectar_config(
        call_config.flatten())
    self.skip_repomd_steps = False
    self.current_revision = 0
def __init__(self, repo=None, conduit=None, config=None, **kwargs):
    """
    Initialize the SyncStep and assemble its child steps.

    :param repo: repository to sync
    :type repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(SyncStep, self).__init__(constants.SYNC_STEP_MAIN, repo=repo, conduit=conduit,
                                   config=config, plugin_type=constants.IMPORTER_TYPE_ID,
                                   **kwargs)
    self.description = _('Syncing Docker Repository')

    # Unit keys, populated by GetMetadataStep
    self.available_units = []
    # populated by GetMetadataStep
    self.tags = {}

    self.validate(config)
    dl_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    remote_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    feed = config.get(importer_constants.KEY_FEED)

    # Repository object used to talk to the remote registry.
    self.index_repository = Repository(remote_name, dl_config, feed,
                                       self.get_working_dir())

    self.add_child(GetMetadataStep())
    # Keep a reference so its "units_to_download" attribute can be read later.
    self.step_get_local_units = GetLocalUnitsStep(constants.IMPORTER_TYPE_ID)
    self.add_child(self.step_get_local_units)
    self.add_child(
        DownloadStep(constants.SYNC_STEP_DOWNLOAD,
                     downloads=self.generate_download_requests(),
                     repo=repo,
                     config=config,
                     description=_('Downloading remote files')))
    self.add_child(SaveDockerUnits())
def _create_and_configure_downloader(self, listener):
    """
    Build an HTTP threaded downloader driven by the importer configuration.

    :param listener: event listener that receives the download callbacks
    :return: a configured downloader
    :rtype: nectar.downloaders.threaded.HTTPThreadedDownloader
    """
    nectar_conf = importer_config_to_nectar_config(self.config.flatten())
    return HTTPThreadedDownloader(nectar_conf, listener)