def test_get_config(root: Path, config: Config) -> None:
    """
    Test ``get_config``.
    """
    config = get_config(root)

    # the full configuration the fixtures are expected to produce
    expected = {
        "author": {
            "email": "*****@*****.**",
            "name": "Beto Dealmeida",
            "note": "Este, sou eu",
            "url": "https://taoetc.org/",
        },
        "title": "道&c.",
        "subtitle": "Musings about the path and other things",
        "language": "en",
        "categories": {
            "stem": {
                "description": "Science, technology, engineering, & math",
                "label": "STEM",
                "tags": ["blog", "programming"],
            },
        },
        "announcers": {"announcer": {"plugin": "announcer"}},
        "assistants": {"assistant": {"plugin": "assistant"}},
        "builders": {
            "builder": {
                "announce_on": ["announcer"],
                "home": "https://example.com/",
                "path": "generic",
                "plugin": "builder",
                "publish_to": ["publisher"],
            },
        },
        "publishers": {"publisher": {"plugin": "publisher"}},
        "social": [{"title": "My page", "url": "https://example.com/user"}],
        "templates": {"short": []},
    }
    assert config.dict() == expected

    # a directory without a configuration file should exit the program
    with pytest.raises(SystemExit) as exc_info:
        get_config(Path("/path/to"))
    assert str(exc_info.value) == "No configuration found!"
async def run(root: Path, force: bool = False) -> None:
    """
    Create a new blog skeleton.
    """
    # copy every packaged skeleton resource into the blog root
    for resource in sorted(resource_listdir("nefelibata", "templates/skeleton")):
        origin = Path(
            resource_filename(
                "nefelibata",
                os.path.join("templates/skeleton", resource),
            ),
        )
        target = root / resource

        # refuse to clobber existing files unless the caller forces it
        if target.exists() and not force:
            resource_type = "Directory" if origin.is_dir() else "File"
            raise IOError(f"{resource_type} {target} already exists!")

        if origin.is_dir():
            target.mkdir(parents=True, exist_ok=True)
            shutil.copytree(origin, target, dirs_exist_ok=True)
        else:
            target.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy(origin, target)

    # create templates
    config = get_config(root)
    for builder in get_builders(root, config).values():
        builder.setup()

    _logger.info("Blog created!")
async def run(root: Path, title: str, type_: str = "post") -> None:
    """
    Create a new post and open editor.
    """
    _logger.info("Creating new directory")
    directory = secure_filename(title).lower()
    target = root / "posts" / directory
    if target.exists():
        raise IOError("Directory already exists!")
    target.mkdir(parents=True)

    headers = {
        "subject": title,
        "summary": "",
        "keywords": "",
    }

    # non-default post types pull extra (empty) headers from the config templates
    if type_ != "post":
        config = get_config(root)
        try:
            extra_headers = config.templates[type_]
        except KeyError as ex:
            raise Exception(f"Invalid post type: {type_}") from ex
        headers["type"] = type_
        for key in extra_headers:
            headers[f"{type_}-{key}"] = ""

    # write the header block followed by an empty body
    filepath = target / "index.mkd"
    with open(filepath, "w", encoding="utf-8") as output:
        for key, value in headers.items():
            output.write(f"{key}: {value}\n")
        output.write("\n\n")

    editor = os.environ.get("EDITOR")
    if not editor:
        _logger.info("No EDITOR found, exiting")
        return
    call([editor, filepath])
async def run(  # pylint: disable=too-many-locals
    root: Path,
    force: bool = False,
) -> None:
    """
    Build blog from Markdown files and online interactions.
    """
    _logger.info("Building blog")
    config = get_config(root)
    _logger.debug(config)

    build = root / "build"
    if not build.exists():
        _logger.info("Creating `build/` directory")
        build.mkdir()

    posts = get_posts(root, config)

    # collect interactions from posts/site
    post_interactions: Dict[Path, Dict[str, Interaction]] = defaultdict(dict)

    _logger.info("Collecting interactions from posts")
    announcers = get_announcers(root, config, Scope.POST)
    tasks = [
        asyncio.create_task(collect_post(post, announcer, post_interactions))
        for post in posts
        for name, announcer in announcers.items()
        if name in post.announcers
    ]

    _logger.info("Collecting interactions from site")
    announcers = get_announcers(root, config, Scope.SITE)
    tasks.extend(
        asyncio.create_task(collect_site(announcer, post_interactions))
        for announcer in announcers.values()
    )
    await asyncio.gather(*tasks)

    # store new interactions
    await asyncio.gather(
        *(
            asyncio.create_task(save_interactions(path.parent, interactions))
            for path, interactions in post_interactions.items()
        ),
    )

    # run assistants
    _logger.info("Running post assistants")
    assistants = get_assistants(root, config, Scope.POST)
    tasks = [
        asyncio.create_task(assistant.process_post(post, force))
        for post in posts
        for assistant in assistants.values()
    ]

    _logger.info("Running site assistants")
    assistants = get_assistants(root, config, Scope.SITE)
    tasks.extend(
        asyncio.create_task(assistant.process_site(force))
        for assistant in assistants.values()
    )
    await asyncio.gather(*tasks)

    # build posts/site
    builders = get_builders(root, config)

    _logger.info("Processing posts")
    tasks = [
        asyncio.create_task(builder.process_post(post, force))
        for post in posts
        for builder in builders.values()
    ]

    _logger.info("Processing site")
    tasks.extend(
        asyncio.create_task(builder.process_site(force))
        for builder in builders.values()
    )
    await asyncio.gather(*tasks)
async def run(  # pylint: disable=too-many-locals
    root: Path,
    force: bool = False,
) -> None:
    """
    Publish blog.

    Publishes the built site with every configured publisher, persists the
    publishing timestamps, then runs site- and post-scoped announcers for
    anything not yet announced, and finally persists the new announcements.
    """
    _logger.info("Publishing blog")
    config = get_config(root)
    _logger.debug(config)

    # publish site
    publishings = load_yaml(root / PUBLISHINGS_FILENAME, Publishing)
    tasks = []
    for name, publisher in get_publishers(root, config).items():
        # only publish what changed since this publisher's last run
        since = publishings[name].timestamp if name in publishings else None
        task = asyncio.create_task(
            publish_site(name, publisher, publishings, since, force),
        )
        tasks.append(task)
    await asyncio.gather(*tasks)

    # persist publishings
    with open(root / PUBLISHINGS_FILENAME, "w", encoding="utf-8") as output:
        yaml.dump(
            {
                name: publishing.dict()
                for name, publishing in publishings.items()
            },
            output,
        )

    # announcements
    tasks = []

    # announce site
    site_announcements = load_yaml(root / ANNOUNCEMENTS_FILENAME, Announcement)
    # BUG FIX: ``max()`` on an empty sequence raises ``ValueError`` when no
    # publisher has ever recorded a publishing; fall back to ``None`` so the
    # up-to-date short-circuit below is simply skipped in that case.
    last_published = max(
        (publishing.timestamp for publishing in publishings.values()),
        default=None,
    )
    announcers = get_announcers(root, config, Scope.SITE)
    for name, announcer in announcers.items():
        if (
            last_published is not None
            and name in site_announcements
            and (
                site_announcements[name].timestamp
                + timedelta(seconds=site_announcements[name].grace_seconds)
            )
            >= last_published
        ):
            # already announced after last published
            _logger.info("Announcer %s is up-to-date", name)
            continue
        task = asyncio.create_task(
            announce_site(name, announcer, site_announcements),
        )
        tasks.append(task)

    # announce posts
    modified_post_announcements: Dict[Path, Dict[str, Announcement]] = {}
    announcers = get_announcers(root, config, Scope.POST)
    for post in get_posts(root, config):
        path = post.path.parent / ANNOUNCEMENTS_FILENAME
        post_announcements = load_yaml(path, Announcement)
        # announcers requested by the post that haven't announced it yet
        post_announcers = {
            name: announcers[name]
            for name in post.announcers
            if name in announcers and name not in post_announcements
        }
        for name, announcer in post_announcers.items():
            task = asyncio.create_task(
                announce_post(name, announcer, post, post_announcements),
            )
            tasks.append(task)

        # store new announcements to persist later
        if post_announcers:
            modified_post_announcements[post.path] = post_announcements

    await asyncio.gather(*tasks)

    # persist new announcements
    tasks = []
    task = asyncio.create_task(save_announcements(root, site_announcements))
    # BUG FIX: this task was previously created but never appended, so the
    # site announcements save was never awaited and could be dropped.
    tasks.append(task)
    for post_path, announcements in modified_post_announcements.items():
        task = asyncio.create_task(
            save_announcements(post_path.parent, announcements),
        )
        tasks.append(task)
    await asyncio.gather(*tasks)