Example #1
    def _import_modules(self, inventory, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths.

        :param inventory: A module inventory object.
        :type inventory: Inventory
        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        :return: A list of the imported module unit keys.
        :rtype: list
        """
        imported_modules = []
        for module_path in module_paths:
            if self.canceled:
                return []
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_json(puppet_manifest)
            if inventory.already_associated(module):
                # Decrement the total number of modules we're importing
                self.report.modules_total_count -= 1
                continue
            _LOG.info(IMPORT_MODULE % dict(mod=module_path))
            imported_modules.append(module.unit_key())
            self._add_module(module_path, module)
            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state != constants.STATE_FAILED:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        return imported_modules
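
Everything in this loop hinges on inventory.already_associated(module) deciding whether a module can be skipped. As a minimal sketch of that idea, assuming unit keys are plain dicts, an inventory could simply be backed by a set of the keys already stored in the repository (the InMemoryInventory class below is hypothetical, not the Inventory type named in the docstring):

class InMemoryInventory(object):
    """Hypothetical stand-in for the inventory consulted by _import_modules."""

    def __init__(self, existing_unit_keys):
        # Unit keys are dicts, so keep hashable, order-independent copies.
        self._existing = set(tuple(sorted(key.items())) for key in existing_unit_keys)

    def already_associated(self, module):
        # True when the module's unit key is already present in the repository.
        return tuple(sorted(module.unit_key().items())) in self._existing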
Example #2
def handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path,
                         conduit):
    """
    Handles an upload unit request to the importer. This call is responsible
    for moving the unit from its temporary location where Pulp stored the
    upload to the final storage location (as dictated by Pulp) for the unit.
    This call will also update the database in Pulp to reflect the unit
    and its association to the repository.

    :param repo: repository into which the unit is being uploaded
    :type  repo: pulp.plugins.model.Repository
    :param type_id: type of unit being uploaded
    :type  type_id: str
    :param unit_key: unique identifier for the unit
    :type  unit_key: dict
    :param metadata: extra data about the unit
    :type  metadata: dict
    :param file_path: temporary location of the uploaded file
    :type  file_path: str
    :param conduit: for calls back into Pulp
    :type  conduit: pulp.plugins.conduit.upload.UploadConduit
    """

    if type_id != constants.TYPE_PUPPET_MODULE:
        raise NotImplementedError()

    # Create a module with unit_key if supplied
    initial_module = None
    if unit_key:
        initial_module = Module.from_dict(unit_key)

    # Extract the metadata from the module
    extracted_data = metadata_parser.extract_metadata(file_path,
                                                      repo.working_dir,
                                                      initial_module)
    checksum = metadata_parser.calculate_checksum(file_path)

    # Create a module from the metadata
    module = Module.from_json(extracted_data)
    module.checksum = checksum

    # Create the Pulp unit
    type_id = constants.TYPE_PUPPET_MODULE
    unit_key = module.unit_key()
    unit_metadata = module.unit_metadata()
    relative_path = constants.STORAGE_MODULE_RELATIVE_PATH % module.filename()

    unit = conduit.init_unit(type_id, unit_key, unit_metadata, relative_path)

    # Copy from the upload temporary location into where Pulp wants it to live
    shutil.copy(file_path, unit.storage_path)

    # Save the unit into the destination repository
    conduit.save_unit(unit)

    return {'success_flag': True, 'summary': '', 'details': {}}
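
To see the flow end to end, here is a rough sketch of driving the handler with throwaway stand-ins for the Pulp objects. The stub classes are hypothetical and only expose the pieces the handler actually touches (repo.working_dir, conduit.init_unit, conduit.save_unit, unit.storage_path); they are not part of the Pulp plugin API:

import os


class _StubUnit(object):
    def __init__(self, storage_path):
        self.storage_path = storage_path
        self.metadata = {}


class _StubConduit(object):
    """Records the unit that handle_uploaded_unit initializes and saves."""

    def __init__(self, storage_root):
        self.storage_root = storage_root
        self.saved = []

    def init_unit(self, type_id, unit_key, metadata, relative_path):
        storage_path = os.path.join(self.storage_root, relative_path)
        directory = os.path.dirname(storage_path)
        if not os.path.isdir(directory):
            os.makedirs(directory)
        return _StubUnit(storage_path)

    def save_unit(self, unit):
        self.saved.append(unit)


class _StubRepo(object):
    def __init__(self, working_dir):
        self.working_dir = working_dir


# With a real puppet module tarball at module_path, the call would look like:
# result = handle_uploaded_unit(_StubRepo('/tmp/work'), constants.TYPE_PUPPET_MODULE,
#                               unit_key=None, metadata={}, file_path=module_path,
#                               conduit=_StubConduit('/tmp/storage'))
# assert result == {'success_flag': True, 'summary': '', 'details': {}}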
Example #3
def handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path, conduit):
    """
    Handles an upload unit request to the importer. This call is responsible
    for moving the unit from its temporary location where Pulp stored the
    upload to the final storage location (as dictated by Pulp) for the unit.
    This call will also update the database in Pulp to reflect the unit
    and its association to the repository.

    :param repo: repository into which the unit is being uploaded
    :type  repo: pulp.plugins.model.Repository
    :param type_id: type of unit being uploaded
    :type  type_id: str
    :param unit_key: unique identifier for the unit
    :type  unit_key: dict
    :param metadata: extra data about the unit
    :type  metadata: dict
    :param file_path: temporary location of the uploaded file
    :type  file_path: str
    :param conduit: for calls back into Pulp
    :type  conduit: pulp.plugins.conduit.upload.UploadConduit
    """

    if type_id != constants.TYPE_PUPPET_MODULE:
        raise NotImplementedError()

    # Create a module with unit_key if supplied
    initial_module = None
    if unit_key:
        initial_module = Module.from_dict(unit_key)

    # Extract the metadata from the module
    extracted_data = metadata_parser.extract_metadata(file_path, repo.working_dir, initial_module)
    checksum = metadata_parser.calculate_checksum(file_path)

    # Create a module from the metadata
    module = Module.from_json(extracted_data)
    module.checksum = checksum

    # Create the Pulp unit
    type_id = constants.TYPE_PUPPET_MODULE
    unit_key = module.unit_key()
    unit_metadata = module.unit_metadata()
    relative_path = constants.STORAGE_MODULE_RELATIVE_PATH % module.filename()

    unit = conduit.init_unit(type_id, unit_key, unit_metadata, relative_path)

    # Copy from the upload temporary location into where Pulp wants it to live
    shutil.copy(file_path, unit.storage_path)

    # Save the unit into the destination repository
    conduit.save_unit(unit)

    return {'success_flag': True, 'summary': '', 'details': {}}
Example #4
    def test_from_json(self):
        # Setup
        data = json.loads(VALID_MODULE_METADATA_JSON)

        # Test
        module = Module.from_json(data)

        # Verify
        self.assertEqual(module.name, "valid")
        self.assertEqual(module.author, "jdob")

        module.name = "jdob-valid" # rename the module to use the assert
        self.assert_valid_module(module)
Example #5
    def test_from_json(self):
        # Setup
        data = json.loads(VALID_MODULE_METADATA_JSON)

        # Test
        module = Module.from_json(data)

        # Verify
        self.assertEqual(module.name, "valid")
        self.assertEqual(module.author, "jdob")

        module.name = "jdob-valid"  # rename the module to use the assert
        self.assert_valid_module(module)
Example #6
    def test_extract_metadata(self):
        # Setup
        filename = os.path.join(self.module_dir, self.module.filename())

        # Test
        metadata_json = metadata.extract_metadata(filename, self.tmp_dir)
        self.module = Module.from_json(metadata_json)

        # Verify
        self.assertEqual(self.module.name, 'valid')
        self.assertEqual(self.module.version, '1.0.0')
        self.assertEqual(self.module.author, 'jdob')

        self._assert_test_module_metadata()
Example #7
    def test_extract_metadata(self):
        # Setup
        filename = os.path.join(self.module_dir, self.module.filename())

        # Test
        metadata_json = metadata.extract_metadata(filename, self.tmp_dir, self.module)
        self.module = Module.from_json(metadata_json)

        # Verify
        self.assertEqual(self.module.name, 'valid')
        self.assertEqual(self.module.version, '1.0.0')
        self.assertEqual(self.module.author, 'jdob')

        self._assert_test_module_metadata()
Example #8
    def test_extract_metadata_no_module(self, mkdtemp):
        # Setup
        filename = os.path.join(self.module_dir, self.module.filename())
        extraction_dir = os.path.join(self.tmp_dir, "1234")
        mkdtemp.return_value = extraction_dir

        metadata_json = metadata.extract_metadata(filename, self.tmp_dir)
        self.module = Module.from_json(metadata_json)

        # Verify
        self.assertEqual(self.module.name, 'valid')
        self.assertEqual(self.module.version, '1.0.0')
        self.assertEqual(self.module.author, 'jdob')

        self._assert_test_module_metadata()

        self.assertTrue(not os.path.exists(extraction_dir))
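
The test above patches mkdtemp and then asserts that the extraction directory no longer exists, which implies the parser unpacks the tarball into a temporary directory, reads the metadata out of it, and always cleans up after itself. A hedged re-creation of that shape, assuming the archive carries a metadata.json inside its top-level directory (this is a sketch, not the actual metadata_parser.extract_metadata implementation):

import json
import os
import shutil
import tarfile
import tempfile


def extract_metadata_sketch(module_path, temp_root):
    """Unpack the module tarball, read its metadata.json, then clean up."""
    extraction_dir = tempfile.mkdtemp(dir=temp_root)
    try:
        with tarfile.open(module_path) as archive:
            archive.extractall(extraction_dir)
        # Puppet module tarballs contain a single top-level directory.
        module_dir = os.path.join(extraction_dir, os.listdir(extraction_dir)[0])
        with open(os.path.join(module_dir, 'metadata.json')) as handle:
            return json.load(handle)
    finally:
        # Mirrors the test's assertion that the extraction dir is removed.
        shutil.rmtree(extraction_dir)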
Example #9
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules from the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        criteria = UnitAssociationCriteria(
            type_ids=[constants.TYPE_PUPPET_MODULE],
            unit_fields=Module.UNIT_KEY_NAMES)
        local_units = self.conduit.get_units(criteria=criteria)
        local_unit_keys = [unit.unit_key for unit in local_units]
        remote_unit_keys = []

        for module_path in module_paths:
            if self.canceled:
                return
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_json(puppet_manifest)
            remote_unit_keys.append(module.unit_key())

            # Even though we've already basically processed this unit, not doing this makes the
            # progress reporting confusing because it shows Pulp always importing all the modules.
            if module.unit_key() in local_unit_keys:
                self.report.modules_total_count -= 1
                continue
            _logger.debug(IMPORT_MODULE % dict(mod=module_path))
            self._add_module(module_path, module)
            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(
            constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(local_units, remote_unit_keys)
Example #10
    def test_from_json_old_name(self):
        """
        Test that the Module.from_json method handles the old naming style
        """
        # Setup
        metadata = {
            'name': 'oldauthor/oldmodule',
            'version': '0.1.0',
        }

        # Test
        module = Module.from_json(metadata)

        # Verify
        self.assertEqual(module.author, 'oldauthor')
        self.assertEqual(module.name, 'oldmodule')
        self.assertEqual(module.version, '0.1.0')
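
Together with the test_from_json examples further up, this shows that Module.from_json normalizes the combined name field into separate author and name attributes, accepting the old 'author/name' form and, apparently, a dashed 'author-name' form as well. A hypothetical helper capturing that normalization (illustrative only, not the pulp_puppet code):

def split_author_and_name(full_name):
    """Split 'jdob-valid' or 'oldauthor/oldmodule' into (author, name)."""
    separator = '/' if '/' in full_name else '-'
    author, _, name = full_name.partition(separator)
    return author, name


assert split_author_and_name('jdob-valid') == ('jdob', 'valid')
assert split_author_and_name('oldauthor/oldmodule') == ('oldauthor', 'oldmodule')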
Example #11
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules from the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        criteria = UnitAssociationCriteria(type_ids=[constants.TYPE_PUPPET_MODULE],
                                           unit_fields=Module.UNIT_KEY_NAMES)
        local_units = self.conduit.get_units(criteria=criteria)
        local_unit_keys = [unit.unit_key for unit in local_units]
        remote_unit_keys = []

        for module_path in module_paths:
            if self.canceled:
                return
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_json(puppet_manifest)
            remote_unit_keys.append(module.unit_key())

            # Even though we've already basically processed this unit, not doing this makes the
            # progress reporting confusing because it shows Pulp always importing all the modules.
            if module.unit_key() in local_unit_keys:
                self.report.modules_total_count -= 1
                continue
            _logger.debug(IMPORT_MODULE % dict(mod=module_path))
            self._add_module(module_path, module)
            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(local_units, remote_unit_keys)
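
When remove_missing is enabled, _remove_missing receives the full list of local units plus every unit key seen on the remote side, so presumably it unassociates the local units whose keys were not seen remotely. A small sketch of that set difference (the helper's actual internals are not part of this listing):

def find_missing_units(local_units, remote_unit_keys):
    """Return the local units whose unit keys were not seen on the remote repository."""
    remote_keys = set(tuple(sorted(key.items())) for key in remote_unit_keys)
    return [unit for unit in local_units
            if tuple(sorted(unit.unit_key.items())) not in remote_keys]


# A _remove_missing implementation would then unassociate each of these units
# from the repository; the exact conduit call is not shown in this listing.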
Example #12
    def _add_new_module(self, downloader, module):
        """
        Performs the tasks for downloading and saving a new unit in Pulp.

        :param downloader: downloader instance to use for retrieving the unit
        :param module: module instance to download
        :type  module: Module
        """
        # Initialize the unit in Pulp
        type_id = constants.TYPE_PUPPET_MODULE
        unit_key = module.unit_key()
        unit_metadata = {}  # populated later but needed for the init call
        relative_path = constants.STORAGE_MODULE_RELATIVE_PATH % module.filename()

        unit = self.sync_conduit.init_unit(type_id, unit_key, unit_metadata,
                                           relative_path)

        try:
            if not self._module_exists(unit.storage_path):
                # Download the bits
                downloaded_filename = downloader.retrieve_module(
                    self.progress_report, module)

                # Copy them to the final location
                shutil.copy(downloaded_filename, unit.storage_path)

            # Extract the extra metadata into the module
            metadata_json = metadata_module.extract_metadata(
                unit.storage_path, self.repo.working_dir, module)
            module = Module.from_json(metadata_json)

            # Update the unit with the extracted metadata
            unit.metadata = module.unit_metadata()

            # Save the unit and associate it to the repository
            self.sync_conduit.save_unit(unit)
        finally:
            # Clean up the temporary module
            downloader.cleanup_module(module)
Example #13
    def _add_new_module(self, downloader, module):
        """
        Performs the tasks for downloading and saving a new unit in Pulp.

        :param downloader: downloader instance to use for retrieving the unit
        :param module: module instance to download
        :type  module: Module
        """
        # Initialize the unit in Pulp
        type_id = constants.TYPE_PUPPET_MODULE
        unit_key = module.unit_key()
        unit_metadata = {}  # populated later but needed for the init call
        relative_path = constants.STORAGE_MODULE_RELATIVE_PATH % module.filename()

        unit = self.sync_conduit.init_unit(type_id, unit_key, unit_metadata,
                                           relative_path)

        try:
            if not self._module_exists(unit.storage_path):
                # Download the bits
                downloaded_filename = downloader.retrieve_module(self.progress_report, module)

                # Copy them to the final location
                shutil.copy(downloaded_filename, unit.storage_path)

            # Extract the extra metadata into the module
            metadata_json = metadata_module.extract_metadata(unit.storage_path, self.repo.working_dir)
            module = Module.from_json(metadata_json)

            # Update the unit with the extracted metadata
            unit.metadata = module.unit_metadata()

            # Save the unit and associate it to the repository
            self.sync_conduit.save_unit(unit)
        finally:
            # Clean up the temporary module
            downloader.cleanup_module(module)
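
The _module_exists guard is what lets a re-sync skip downloading bits that are already in permanent storage. A plausible stand-in is a simple filesystem check (hypothetical; the real helper is not shown in this listing):

import os


def module_already_stored(storage_path):
    """Plausible stand-in for the _module_exists check used above."""
    return os.path.isfile(storage_path)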