Example #1
0
def page_content(lesson,
                 page,
                 solution=None,
                 course=None,
                 lesson_url=None,
                 subpage_url=None,
                 static_url=None,
                 without_cache=False):
    """Return the rendered content of ``page``, using the Arca cache when safe.

    The result is a dict ``{"content": <rendered html>, "urls": [...]}``.
    When the content comes from the cache, the relative URLs stored with it
    are re-absolutized against the current request path and appended to
    ``absolute_urls_to_freeze`` so the freezer still visits them.

    The cache is bypassed when ``without_cache`` is set or when the local
    repository has uncommitted changes (rendering in a fork / during
    development).
    """
    variables = course.vars if course is not None else None

    def content_creator():
        """Return the content and all relative URLs used in it.

        Since the content is stored in cache and can be reused elsewhere, URLs
        must be stored as relative to the current page, so new absolute urls
        can be generated where the content is reused.
        """
        with temporary_url_for_logger(app) as logger:
            with logger:
                content = page.render_html(solution=solution,
                                           static_url=static_url,
                                           lesson_url=lesson_url,
                                           subpage_url=subpage_url,
                                           vars=variables)

            # Each logged call is an (endpoint, kwargs) pair recorded while
            # rendering; rebuild the absolute URL for every one of them.
            absolute_urls = [
                url_for(endpoint, **values)
                for endpoint, values in logger.logged_calls
            ]

        relative_urls = [
            get_relative_url(request.path, x) for x in absolute_urls
        ]

        return {"content": content, "urls": relative_urls}

    # Construct the Repo once: it is needed both for the dirty check and
    # (below) for the cache key.
    repo = Repo(".")

    # Only use the cache if there are no local changes
    # and not rendering in fork
    if without_cache or is_dirty(repo):
        return content_creator()

    # Since ARCA_IGNORE_CACHE_ERRORS is used, this won't fail in forks
    # even if the cache doesn't work.
    # This is only dangerous if the fork sets absolute path to cache and
    # CurrentEnvironmentBackend or VenvBackend are used locally.
    # FIXME? But I don't think there's a way to prevent writing
    # to a file in those backends
    content_key = page_content_cache_key(repo, lesson.slug, page.slug,
                                         solution, variables)
    cached = arca.region.get_or_create(content_key, content_creator)

    # The urls are added twice to ``absolute_urls_to_freeze``
    # when the content is created.
    # But it doesn't matter, duplicate URLs are skipped.
    absolute_urls = [urljoin(request.path, x) for x in cached["urls"]]
    absolute_urls_to_freeze.extend(absolute_urls)

    return cached
Example #2
0
def test_is_dirty(temp_repo_static):
    """``is_dirty`` detects untracked files and content changes, and goes
    back to clean once the working tree is restored."""
    repo = temp_repo_static.repo

    # A fresh checkout reports clean.
    assert not is_dirty(repo)

    # An untracked file makes the repo dirty; deleting it cleans it again.
    untracked = temp_repo_static.repo_path / str(uuid4())
    untracked.touch()
    assert is_dirty(repo)

    untracked.unlink()
    assert not is_dirty(repo)

    # Modifying a tracked file makes the repo dirty; writing the original
    # content back cleans it again.
    saved_text = temp_repo_static.file_path.read_text()
    temp_repo_static.file_path.write_text(saved_text + str(uuid4()))
    assert is_dirty(repo)

    temp_repo_static.file_path.write_text(saved_text)
    assert not is_dirty(repo)