def test_to_json(self):
    """Serializing parsed metadata should emit one four-key dict per module."""
    # Setup
    metadata = RepositoryMetadata()
    metadata.update_from_json(VALID_REPO_METADATA_JSON)

    # Test
    serialized = metadata.to_json()

    # Verify
    parsed = json.loads(serialized)
    self.assertEqual(2, len(parsed))

    by_name = sorted(parsed, key=lambda entry: entry["name"])

    # (name, author, version, tag_list) per module, in name order
    expected = [
        ("common", "lab42", "0.0.1", []),
        ("postfix", "lab42", "0.0.2", ["postfix", "applications"]),
    ]
    for entry, (name, author, version, tags) in zip(by_name, expected):
        self.assertEqual(4, len(entry))
        self.assertEqual(entry["name"], name)
        self.assertEqual(entry["author"], author)
        self.assertEqual(entry["version"], version)
        self.assertEqual(entry["tag_list"], tags)
def test_update_from_json(self):
    """Parsing the valid JSON document should populate two Module objects."""
    # Test
    metadata = RepositoryMetadata()
    metadata.update_from_json(VALID_REPO_METADATA_JSON)

    # Verify
    self.assertEqual(2, len(metadata.modules))
    for module in metadata.modules:
        self.assertTrue(isinstance(module, Module))

    by_name = sorted(metadata.modules, key=lambda module: module.name)

    # (name, author, version, tag_list, checksum, checksum_type) in name order
    expected = [
        ("common", "lab42", "0.0.1", [], "bar", "bar_type"),
        ("postfix", "lab42", "0.0.2", ["postfix", "applications"], "foo", "foo_type"),
    ]
    for module, (name, author, version, tags, checksum, checksum_type) in zip(by_name, expected):
        self.assertEqual(module.name, name)
        self.assertEqual(module.author, author)
        self.assertEqual(module.version, version)
        self.assertEqual(module.tag_list, tags)
        self.assertEqual(module.description, None)
        self.assertEqual(module.project_page, None)
        self.assertEqual(module.checksum, checksum)
        self.assertEqual(module.checksum_type, checksum_type)
def test_to_json(self):
    """Round-trip: parse the valid document, serialize, and check the output."""
    # Setup
    metadata = RepositoryMetadata()
    metadata.update_from_json(VALID_REPO_METADATA_JSON)

    # Test
    serialized = metadata.to_json()

    # Verify
    parsed = json.loads(serialized)
    self.assertEqual(2, len(parsed))
    first, second = sorted(parsed, key=lambda entry: entry['name'])

    self.assertEqual(4, len(first))
    self.assertEqual(first['name'], 'common')
    self.assertEqual(first['author'], 'lab42')
    self.assertEqual(first['version'], '0.0.1')
    self.assertEqual(first['tag_list'], [])

    self.assertEqual(4, len(second))
    self.assertEqual(second['name'], 'postfix')
    self.assertEqual(second['author'], 'lab42')
    self.assertEqual(second['version'], '0.0.2')
    self.assertEqual(second['tag_list'], ['postfix', 'applications'])
def test_update_from_json(self):
    """update_from_json should build two fully populated Module instances."""
    # Test
    metadata = RepositoryMetadata()
    metadata.update_from_json(VALID_REPO_METADATA_JSON)

    # Verify
    self.assertEqual(2, len(metadata.modules))
    for module in metadata.modules:
        self.assertTrue(isinstance(module, Module))

    common, postfix = sorted(metadata.modules, key=lambda module: module.name)

    self.assertEqual(common.name, 'common')
    self.assertEqual(common.author, 'lab42')
    self.assertEqual(common.version, '0.0.1')
    self.assertEqual(common.tag_list, [])
    self.assertEqual(common.description, None)
    self.assertEqual(common.project_page, None)
    self.assertEqual(common.checksum, 'bar')
    self.assertEqual(common.checksum_type, 'bar_type')

    self.assertEqual(postfix.name, 'postfix')
    self.assertEqual(postfix.author, 'lab42')
    self.assertEqual(postfix.version, '0.0.2')
    self.assertEqual(postfix.tag_list, ['postfix', 'applications'])
    self.assertEqual(postfix.description, None)
    self.assertEqual(postfix.project_page, None)
    self.assertEqual(postfix.checksum, 'foo')
    self.assertEqual(postfix.checksum_type, 'foo_type')
def test_retrieve_metadata(self, mock_get_working_dir, mock_finalize):
    """retrieve_metadata should return one parseable document and update the report."""
    # Test
    docs = self.downloader.retrieve_metadata(self.mock_progress_report)

    # Verify
    self.assertEqual(1, len(docs))

    # The single returned document parses into the expected two modules
    metadata = RepositoryMetadata()
    metadata.update_from_json(docs[0])
    self.assertEqual(2, len(metadata.modules))

    report = self.mock_progress_report
    self.assertEqual(1, report.metadata_query_total_count)
    self.assertEqual(1, report.metadata_query_finished_count)
    expected_query = os.path.join(VALID_REPO_DIR, constants.REPO_METADATA_FILENAME)
    self.assertEqual(expected_query, report.metadata_current_query)
    self.assertEqual(2, report.update_progress.call_count)

    mock_finalize.assert_called_once()
def test_retrieve_metadata(self, mock_finalize):
    """retrieve_metadata should return one parseable document and update the report."""
    # Test
    docs = self.downloader.retrieve_metadata(self.mock_progress_report)

    # Verify
    self.assertEqual(1, len(docs))

    # The single returned document parses into the expected two modules
    metadata = RepositoryMetadata()
    metadata.update_from_json(docs[0])
    self.assertEqual(2, len(metadata.modules))

    report = self.mock_progress_report
    self.assertEqual(1, report.metadata_query_total_count)
    self.assertEqual(1, report.metadata_query_finished_count)
    expected_query = os.path.join(VALID_REPO_DIR, constants.REPO_METADATA_FILENAME)
    self.assertEqual(expected_query, report.metadata_current_query)
    self.assertEqual(2, report.update_progress.call_count)

    mock_finalize.assert_called_once()
def _generate_metadata(self, modules):
    """
    Generates the repository metadata document for all modules in the
    repository and writes it into the build directory.

    :param modules: list of modules in the repository; empty list if there
           are none
    :type  modules: list of pulp_puppet.plugins.db.models.Module
    """
    msg = _('Generating metadata for repository <%(repo_id)s>')
    msg_dict = {'repo_id': self.repo.repo_id}
    _logger.info(msg, msg_dict)

    metadata = RepositoryMetadata()
    metadata.modules = modules

    # Write the JSON representation of the metadata to the repository.
    # Use a context manager so the file handle is closed even if the
    # write raises (the original open/write/close leaked it on error).
    json_metadata = metadata.to_json()
    build_dir = self._build_dir()
    metadata_file = os.path.join(build_dir, constants.REPO_METADATA_FILENAME)
    with open(metadata_file, 'w') as f:
        f.write(json_metadata)
def _parse_metadata(self):
    """
    Takes the necessary actions (according to the run configuration) to
    retrieve and parse the repository's metadata. This call will return
    either the successfully parsed metadata or None if it could not be
    retrieved or parsed. The progress report will be updated with the
    appropriate description of what went wrong in the event of an error,
    so the caller should interpret a None return as an error occurring
    and not continue the sync.

    :return: object representation of the metadata
    :rtype:  RepositoryMetadata
    """
    msg = _('Beginning metadata retrieval for repository <%(repo_id)s>')
    msg_dict = {'repo_id': self.repo.id}
    _logger.info(msg, msg_dict)

    self.progress_report.metadata_state = STATE_RUNNING
    self.progress_report.update_progress()

    start_time = datetime.now()

    # Retrieve the metadata from the source
    try:
        downloader = self._create_downloader()
        self.downloader = downloader
        metadata_json_docs = downloader.retrieve_metadata(self.progress_report)
    except Exception as e:
        if self._canceled:
            msg = _('Exception occurred on canceled metadata download: %(exc)s')
            msg_dict = {'exc': e}
            # Logger.warn is a deprecated alias; warning() is the supported name.
            _logger.warning(msg, msg_dict)
            self.progress_report.metadata_state = STATE_CANCELED
            return None
        msg = _('Exception while retrieving metadata for repository <%(repo_id)s>')
        msg_dict = {'repo_id': self.repo.id}
        _logger.exception(msg, msg_dict)
        self.progress_report.metadata_state = STATE_FAILED
        self.progress_report.metadata_error_message = _('Error downloading metadata')
        self.progress_report.metadata_exception = e
        self.progress_report.metadata_traceback = sys.exc_info()[2]

        end_time = datetime.now()
        duration = end_time - start_time
        self.progress_report.metadata_execution_time = duration.seconds

        self.progress_report.update_progress()
        return None
    finally:
        # Clear the downloader reference whether the retrieval succeeded or not
        self.downloader = None

    # Parse the retrieved metadata documents
    try:
        metadata = RepositoryMetadata()
        for doc in metadata_json_docs:
            metadata.update_from_json(doc)
    except Exception as e:
        msg = _('Exception parsing metadata for repository <%(repo_id)s>')
        msg_dict = {'repo_id': self.repo.id}
        _logger.exception(msg, msg_dict)
        self.progress_report.metadata_state = STATE_FAILED
        msg = _("Error parsing repository modules metadata document")
        self.progress_report.metadata_error_message = msg
        self.progress_report.metadata_exception = e
        self.progress_report.metadata_traceback = sys.exc_info()[2]

        end_time = datetime.now()
        duration = end_time - start_time
        self.progress_report.metadata_execution_time = duration.seconds

        self.progress_report.update_progress()
        return None

    # Last update to the progress report before returning
    self.progress_report.metadata_state = STATE_SUCCESS
    end_time = datetime.now()
    duration = end_time - start_time
    self.progress_report.metadata_execution_time = duration.seconds
    self.progress_report.update_progress()

    return metadata
def _parse_metadata(self):
    """
    Takes the necessary actions (according to the run configuration) to
    retrieve and parse the repository's metadata. This call will return
    either the successfully parsed metadata or None if it could not be
    retrieved or parsed. The progress report will be updated with the
    appropriate description of what went wrong in the event of an error,
    so the caller should interpret a None return as an error occurring
    and not continue the sync.

    :return: object representation of the metadata
    :rtype:  RepositoryMetadata
    """
    msg = _('Beginning metadata retrieval for repository <%(repo_id)s>')
    msg_dict = {'repo_id': self.repo.id}
    _logger.info(msg, msg_dict)

    self.progress_report.metadata_state = STATE_RUNNING
    self.progress_report.update_progress()

    start_time = datetime.now()

    # Retrieve the metadata from the source
    try:
        downloader = self._create_downloader()
        self.downloader = downloader
        metadata_json_docs = downloader.retrieve_metadata(self.progress_report)
    except Exception as e:
        if self._canceled:
            msg = _('Exception occurred on canceled metadata download: %(exc)s')
            msg_dict = {'exc': e}
            # Logger.warn is a deprecated alias; warning() is the supported name.
            _logger.warning(msg, msg_dict)
            self.progress_report.metadata_state = STATE_CANCELED
            return None
        msg = _('Exception while retrieving metadata for repository <%(repo_id)s>')
        msg_dict = {'repo_id': self.repo.id}
        _logger.exception(msg, msg_dict)
        self.progress_report.metadata_state = STATE_FAILED
        self.progress_report.metadata_error_message = _('Error downloading metadata')
        self.progress_report.metadata_exception = e
        self.progress_report.metadata_traceback = sys.exc_info()[2]

        end_time = datetime.now()
        duration = end_time - start_time
        self.progress_report.metadata_execution_time = duration.seconds

        self.progress_report.update_progress()
        return None
    finally:
        # Clear the downloader reference whether the retrieval succeeded or not
        self.downloader = None

    # Parse the retrieved metadata documents
    try:
        metadata = RepositoryMetadata()
        for doc in metadata_json_docs:
            metadata.update_from_json(doc)
    except Exception as e:
        msg = _('Exception parsing metadata for repository <%(repo_id)s>')
        msg_dict = {'repo_id': self.repo.id}
        _logger.exception(msg, msg_dict)
        self.progress_report.metadata_state = STATE_FAILED
        self.progress_report.metadata_error_message = _(
            'Error parsing repository modules metadata document')
        self.progress_report.metadata_exception = e
        self.progress_report.metadata_traceback = sys.exc_info()[2]

        end_time = datetime.now()
        duration = end_time - start_time
        self.progress_report.metadata_execution_time = duration.seconds

        self.progress_report.update_progress()
        return None

    # Last update to the progress report before returning
    self.progress_report.metadata_state = STATE_SUCCESS
    end_time = datetime.now()
    duration = end_time - start_time
    self.progress_report.metadata_execution_time = duration.seconds
    self.progress_report.update_progress()

    return metadata