Example #1
def invalidate_all_pages_related_to_video(video_id=None, video_slug=None):
    """Given a video file, recurse backwards up the hierarchy and invalidate all pages.
    Also include video pages and related exercise pages.
    """
    assert (video_id or video_slug) and not (video_id and video_slug), "One arg, not two" 

    # Expire all video files and related paths
    video_paths = get_video_page_paths(video_id=video_id, video_slug=video_slug)
    exercise_paths = get_exercise_page_paths(video_id=video_id, video_slug=video_slug)
    leaf_paths = set(video_paths).union(set(exercise_paths))

    for leaf_path in leaf_paths:
        all_paths = generate_all_paths(path=leaf_path, base_path=topic_tools.get_topic_tree()['path'])
        for path in filter(has_cache_key, all_paths):  # start at the root
            expire_page(path=path)
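
A minimal usage sketch (hypothetical call sites; the argument values are placeholders): the function takes exactly one of the two keyword arguments, which is what the assertion at the top enforces.

# Hypothetical calls; "abc123" and "adding-fractions" are illustrative values only.
invalidate_all_pages_related_to_video(video_id="abc123")
invalidate_all_pages_related_to_video(video_slug="adding-fractions")
# Passing both arguments, or neither, trips the assertion.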
Example #2
import json
import os

import settings
from utils import topic_tools


TOPICS          = topic_tools.get_topic_tree()
NODE_CACHE      = topic_tools.get_node_cache()
ID2SLUG_MAP     = topic_tools.get_id2slug_map()
Example #3
import json
import os

import settings
from utils import topic_tools

TOPICS = topic_tools.get_topic_tree()
NODE_CACHE = topic_tools.get_node_cache()
EXERCISE_TOPICS = topic_tools.get_exercise_topics()
ID2SLUG_MAP = topic_tools.get_id2slug_map()
LANGUAGE_LOOKUP = json.loads(
    open(os.path.join(settings.DATA_PATH, "languages.json")).read())
LANGUAGE_LIST = json.loads(
    open(os.path.join(settings.SUBTITLES_DATA_ROOT,
                      "listedlanguages.json")).read())
Example #4
def generate_node_cache(topictree=None, output_dir=settings.DATA_PATH):
    """
    Given the KA Lite topic tree, generate a dictionary of all Topic, Exercise, and Video nodes.
    """

    if not topictree:
        topictree = topic_tools.get_topic_tree(force=True)
    node_cache = {}

    def recurse_nodes(node, path="/"):
        # Add the node to the node cache
        kind = node["kind"]
        node_cache[kind] = node_cache.get(kind, {})

        if node["slug"] in node_cache[kind]:
            # Existing node, so append the path to the set of paths
            assert kind in topic_tools.multipath_kinds, "Make sure we expect to see multiple nodes map to the same slug (%s unexpected)" % kind

            # Before adding, let's validate some basic properties of the
            #   stored node and the new node:
            # 1. Compare the keys, and make sure that they overlap
            #      (except the stored node will not have 'path', but instead 'paths')
            # 2. For string args, check that values are the same
            #      (most/all args are strings, and we're already being
            #      careful here, so this check is enough).
            node_shared_keys = set(node.keys()) - set(["path"])
            stored_shared_keys = set(node_cache[kind][node["slug"]]) - set(
                ["paths"])
            unshared_keys = node_shared_keys.symmetric_difference(
                stored_shared_keys)
            shared_keys = node_shared_keys.intersection(stored_shared_keys)
            assert not unshared_keys, "Node and stored node should have all the same keys."
            for key in shared_keys:
                # A cursory check on values, for strings only (avoid unsafe types)
                if isinstance(node[key], basestring):
                    assert node[key] == node_cache[kind][node["slug"]][key]

            # We already added this node, it's just found at multiple paths.
            #   So, save the new path
            node_cache[kind][node["slug"]]["paths"].append(node["path"])

        else:
            # New node, so copy off, massage, and store.
            node_copy = copy.copy(node)
            if "children" in node_copy:
                del node_copy["children"]
            if kind in topic_tools.multipath_kinds:
                # If multiple paths can map to a single slug, need to store all paths.
                node_copy["paths"] = [node_copy["path"]]
                del node_copy["path"]
            node_cache[kind][node["slug"]] = node_copy

        # Do the recursion
        for child in node.get("children", []):
            assert "path" in node and "paths" not in node, "This code can't handle nodes with multiple paths; it just generates them!"
            recurse_nodes(child, node["path"])

    recurse_nodes(topictree)

    with open(os.path.join(output_dir, topic_tools.node_cache_file),
              "w") as fp:
        fp.write(json.dumps(node_cache, indent=2))

    return node_cache
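
The function above assumes module-level imports of copy, json, os, settings, and topic_tools. A hypothetical invocation, assuming "Video" is one of the node kinds named in the docstring:

# Hypothetical usage sketch.
node_cache = generate_node_cache()         # reloads the tree via topic_tools.get_topic_tree(force=True)
video_nodes = node_cache.get("Video", {})  # nodes are grouped by "kind", then keyed by slug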
Example #5
def regenerate_all_pages_related_to_videos(video_ids):
    """Same as above, but on a list of videos"""
    paths_to_regenerate = set() # unique set
    for video_id in video_ids:

        for video_path in get_video_page_paths(video_id=video_id):
            paths_to_regenerate = paths_to_regenerate.union(
                generate_all_paths(path=video_path, base_path=topic_tools.get_topic_tree()['path']))  # start at the root
        for exercise_path in get_exercise_page_paths(video_id=video_id):
            paths_to_regenerate = paths_to_regenerate.union(
                generate_all_paths(path=exercise_path, base_path=topic_tools.get_topic_tree()['path']))  # start at the root

    # Now, regenerate any page.
    for path in paths_to_regenerate:
        create_cache(path=path, force=True)

    return paths_to_regenerate
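
A hypothetical call with placeholder video IDs; the return value is the set of paths whose cache entries were rebuilt:

# Hypothetical usage; the IDs below are placeholders.
regenerated = regenerate_all_pages_related_to_videos(["video_id_1", "video_id_2"])
print("Regenerated %d cached pages" % len(regenerated))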
Example #6
File: videos.py  Project: mjptak/ka-lite
def get_video_ids_for_topic(topic_id, topic_tree=None):
    """Nobody actually calls this, just for utility when using the shell."""
    topic_tree = topic_tree or get_topic_tree()
    return utils.videos.get_video_ids_for_topic(topic_id, topic_tree)
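
As the docstring notes, this is a shell convenience; a hypothetical session, with a placeholder topic ID:

# Hypothetical shell usage; "some-topic-id" stands in for a real topic ID.
video_ids = get_video_ids_for_topic("some-topic-id")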