def test_get_destination_filepath_errors(mocker, has_missing_name, is_bad_config_item):
    """
    get_destination_filepath should log an error and return None if the site
    config is missing the given name, or if the config item does not have a
    properly configured destination.
    """
    patched_log = mocker.patch("content_sync.utils.log")
    # From basic-site-config.yml
    config_item_name = "blog"
    if is_bad_config_item:
        # Replace the lookup so it yields an item with no usable destination
        bad_item = ConfigItem(item={"name": config_item_name, "poorly": "configured"})
        mocker.patch.object(SiteConfig, "find_item_by_name", return_value=bad_item)
    starter = WebsiteStarterFactory.build()
    content_type = "non-existent-config-name" if has_missing_name else config_item_name
    content = WebsiteContentFactory.build(is_page_content=False, type=content_type)
    result = get_destination_filepath(
        content=content, site_config=SiteConfig(starter.config)
    )
    patched_log.error.assert_called_once()
    assert result is None
def test_get_destination_filepath(is_page_content, dirpath, filename, expected):
    """get_destination_filepath should create the filepath for a piece of content"""
    content = WebsiteContentFactory.create(
        is_page_content=is_page_content,
        dirpath=dirpath,
        filename=filename,
    )
    site_config = SiteConfig(content.website.starter.config)
    assert get_destination_filepath(content, site_config) == expected
def test_delete_orphaned_content_in_backend(github):
    """ delete_orphaned_content_in_backend should call batch_delete_files with correct paths"""
    prior_path = "content/old/pages/1.md"
    contents = github.backend.website.websitecontent_set.all()
    # Give one sync state a recorded git path that should survive deletion
    ContentSyncState.objects.filter(content=contents[2]).update(
        data={GIT_DATA_FILEPATH: prior_path}
    )
    paths_to_delete = ["content/nomatch/1.md", "content/nomatch/2.md"]
    github.api.site_config = SiteConfig(github.backend.website.starter.config)
    synced_paths = [
        get_destination_filepath(contents[idx], github.backend.api.site_config)
        for idx in (0, 1)
    ]
    github.api.get_all_file_paths.return_value = iter(
        synced_paths + [prior_path] + paths_to_delete
    )
    github.backend.delete_orphaned_content_in_backend()
    github.api.get_all_file_paths.assert_called_once()
    github.api.batch_delete_files.assert_called_once_with(paths_to_delete)
def upsert_content_files_for_user(self, user_id=None) -> Optional[Commit]:
    """ Upsert multiple WebsiteContent objects to github in one commit """
    # Sync states for this website/user whose checksum is stale, never
    # synced, or whose content is soft-deleted
    unsynced_states = ContentSyncState.objects.filter(
        Q(content__website=self.website) & Q(content__updated_by=user_id)
    ).exclude(
        Q(current_checksum=F("synced_checksum"), content__deleted__isnull=True)
        & Q(synced_checksum__isnull=False)
    )

    tree_elements = []
    results = []
    for state in unsynced_states.iterator():
        content = state.content
        filepath = get_destination_filepath(content, self.site_config)
        if not filepath:
            # Cannot determine a repo path for this content; skip it
            continue
        results.append(
            SyncResult(
                sync_id=state.id,
                filepath=filepath,
                checksum=content.calculate_checksum(),
                deleted=content.deleted is not None,
            )
        )
        serialized = serialize_content_to_file(
            site_config=self.site_config, website_content=content
        )
        # Accumulate git tree elements for any modified files
        tree_elements.extend(self.get_tree_elements(state, serialized, filepath))

    if not tree_elements:
        return None

    commit = self.commit_tree(
        tree_elements, User.objects.filter(id=user_id).first()
    )

    # Save last git filepath and checksum to each sync state
    for result in results:
        state = ContentSyncState.objects.get(id=result.sync_id)
        if result.deleted:
            state.content.delete(force_policy=HARD_DELETE)
        else:
            state.data = {GIT_DATA_FILEPATH: result.filepath}
            state.synced_checksum = result.checksum
            state.save()
    return commit
def delete_content_file(self, content: WebsiteContent) -> Commit:
    """ Delete a file from git """
    repo = self.get_repo()
    filepath = get_destination_filepath(content, self.site_config)
    # The current blob sha is required by the GitHub API to delete a file
    current_sha = repo.get_contents(filepath).sha
    committer = self.git_user(content.updated_by)
    return repo.delete_file(
        filepath,
        f"Delete {filepath}",
        current_sha,
        committer=committer,
    )
def delete_orphaned_content_in_backend(self):
    """ Delete any git repo files without corresponding WebsiteContent objects"""
    keep_paths = []
    for content in self.website.websitecontent_set.iterator():
        keep_paths.append(get_destination_filepath(content, self.site_config))
        # Paths recorded on the ContentSyncState should also be preserved
        sync_data = content.content_sync_state.data
        if sync_data and sync_data.get(GIT_DATA_FILEPATH, None):
            keep_paths.append(sync_data[GIT_DATA_FILEPATH])
    orphans = [
        path
        for path in self.api.get_all_file_paths("/")
        if path not in keep_paths
    ]
    self.api.batch_delete_files(orphans)
def upsert_content_file(
    self, website_content: WebsiteContent, **kwargs
) -> Optional[Commit]:
    """
    Create or update a file in git.

    Args:
        website_content: the content to serialize and push to the repo
        **kwargs: extra arguments forwarded to the PyGithub
            create_file/update_file call (e.g. branch)

    Returns:
        The commit from create_file/update_file, or None when the content
        has no destination filepath (nothing to write).
    """
    destination_filepath = get_destination_filepath(
        website_content, self.site_config
    )
    if not destination_filepath:
        # No filepath, nothing to do
        return None
    repo = self.get_repo()
    data = serialize_content_to_file(
        site_config=self.site_config, website_content=website_content
    )
    git_user = self.git_user(website_content.updated_by)
    try:
        # An existing file's blob sha is required by the GitHub API to update it
        sha = repo.get_contents(destination_filepath).sha
    except Exception:  # pylint:disable=broad-except
        # File does not exist yet (or could not be fetched), so create it.
        # Narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt and masked interpreter shutdown.
        return repo.create_file(
            destination_filepath,
            f"Create {destination_filepath}",
            data,
            committer=git_user,
            author=git_user,
            **kwargs,
        )
    return repo.update_file(
        destination_filepath,
        f"Update {destination_filepath}",
        data,
        sha,
        committer=git_user,
        author=git_user,
        **kwargs,
    )