Example #1
def add_to_summary(sender, **kwargs):
    assert UserLog.is_enabled(), "We shouldn't be saving unless UserLog is enabled."

    instance = kwargs["instance"]

    if not instance.start_datetime:
        raise ValidationError("start_datetime cannot be None")
    if instance.last_active_datetime and instance.start_datetime > instance.last_active_datetime:
        raise ValidationError("UserLog date consistency check for start_datetime and last_active_datetime")

    if instance.end_datetime and not instance.total_seconds:
        # Compute total_seconds, save to summary
        #   Note: only supports setting end_datetime once!
        instance.full_clean()

        # The top computation is more lenient: user activity is just time logged in, literally.
        # The bottom computation is more strict: user activity is from start until the last "action"
        #   recorded--in the current case, that means from login until the last moment an exercise or
        #   video log was updated.
        #instance.total_seconds = datediff(instance.end_datetime, instance.start_datetime, units="seconds")
        instance.total_seconds = 0 if not instance.last_active_datetime else datediff(instance.last_active_datetime, instance.start_datetime, units="seconds")

        # Confirm the result (output info first for easier debugging)
        if instance.total_seconds < 0:
            raise ValidationError("Total learning time should always be non-negative.")
        logging.debug("%s: total time (%d): %d seconds" % (instance.user.username, instance.activity_type, instance.total_seconds))

        # Save only completed log items to the UserLogSummary
        UserLogSummary.add_log_to_summary(instance)
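This receiver follows the Django signal-handler pattern (sender plus **kwargs carrying "instance"), so it is presumably registered against UserLog saves. A minimal sketch of that wiring; the choice of pre_save (rather than post_save) and the sender are assumptions based on the handler mutating instance.total_seconds before the row would be written:

from django.db.models.signals import pre_save

# Hypothetical registration; the signal choice and sender are assumptions.
pre_save.connect(add_to_summary, sender=UserLog)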
Example #2
    def handle(self, *args, **options):

        # Check that we can run
        if not settings.CENTRAL_SERVER:
            raise CommandError("This must only be run on the central server.")
        supported_langs = get_supported_languages()
        if not options["lang_codes"]:
            lang_codes = supported_langs
        else:
            requested_codes = set(options["lang_codes"].split(","))
            lang_codes = [lcode_to_ietf(lc) for lc in requested_codes if lc in supported_langs]
            unsupported_codes = requested_codes - set(lang_codes)
            if unsupported_codes:
                raise CommandError("Requested unsupported languages: %s" % sorted(list(unsupported_codes)))

        # Scrub options
        for key in options:
            # If no_update is set, then disable all update options.
            if key.startswith("update_"):
                options[key] = options[key] and not options["no_update"]

        if version_diff(options["version"], "0.10.3") < 0:
            raise CommandError("This command cannot be used for versions before 0.10.3")

        if options['low_mem']:
            logging.info('Making the GC more aggressive...')
            gc.set_threshold(36, 2, 2)

        # For dealing with central server changes across versions
        upgrade_old_schema()

        # Now, we're going to build the language packs, collecting metadata along the way.
        package_metadata = update_language_packs(lang_codes, options)
Example #3
    def save(self, update_userlog=True, *args, **kwargs):
        # To deal with backwards compatibility,
        #   check video_id, whether imported or not.
        if not self.video_id:
            assert kwargs.get("imported", False), "video_id better be set by internal code."
            assert self.youtube_id, "If not video_id, you better have set youtube_id!"
            self.video_id = i18n.get_video_id(self.youtube_id) or self.youtube_id  # for unknown videos, default to the youtube_id

        if not kwargs.get("imported", False):
            self.full_clean()

            # Compute learner status
            already_complete = self.complete
            self.complete = (self.points >= VideoLog.POINTS_PER_VIDEO)
            if not already_complete and self.complete:
                self.completion_timestamp = datetime.now()

            # Tell logins that they are still active (ignoring validation failures).
            #   TODO(bcipolli): Could log video information in the future.
            if update_userlog:
                try:
                    UserLog.update_user_activity(self.user, activity_type="login", update_datetime=(self.completion_timestamp or datetime.now()), language=self.language)
                except ValidationError as e:
                    logging.error("Failed to update userlog during video: %s" % e)

        super(VideoLog, self).save(*args, **kwargs)
Example #4
    def download_kmap_icons(knowledge_map):
        for key, value in knowledge_map["topics"].items():
            # Note: id here is retrieved from knowledge_map, so we're OK
            #   that we blew away ID in the topic tree earlier.
            if "icon_url" not in value:
                logging.warn("No icon URL for %s" % key)

            value["icon_url"] = iconfilepath + value["id"] + iconextension
            knowledge_map["topics"][key] = value

            out_path = data_path + "../" + value["icon_url"]
            if os.path.exists(out_path) and not force_icons:
                continue

            icon_khan_url = "http://www.khanacademy.org" + value["icon_url"]
            sys.stdout.write("Downloading icon %s from %s..." % (value["id"], icon_khan_url))
            sys.stdout.flush()
            try:
                icon = requests.get(icon_khan_url)
            except Exception as e:
                sys.stdout.write("\n")  # complete the "downloading" output
                sys.stderr.write("Failed to download %-80s: %s\n" % (icon_khan_url, e))
                continue
            if icon.status_code == 200:
                # Write the icon bytes in binary mode and make sure the handle is closed.
                with open(out_path, "wb") as iconfile:
                    iconfile.write(icon.content)
            else:
                sys.stdout.write(" [NOT FOUND]")
                value["icon_url"] = iconfilepath + defaulticon + iconextension
            sys.stdout.write(" done.\n")  # complete the "downloading" output
Example #5
def update_json(youtube_id, lang_code, downloaded, api_response, time_of_attempt):
    """Update language_srt_map to reflect download status

    lang_code in IETF format
    """
    # Open JSON file
    filepath = get_lang_map_filepath(lang_code)
    language_srt_map = softload_json(filepath, logger=logging.error)
    if not language_srt_map:
        return False

    # create updated entry
    entry = language_srt_map[youtube_id]
    entry["downloaded"] = downloaded
    entry["api_response"] = api_response
    entry["last_attempt"] = time_of_attempt
    if api_response == "success":
        entry["last_success"] = time_of_attempt

    # update full-size JSON with new information
    language_srt_map[youtube_id].update(entry)

    # write it to file
    with open(filepath, "w") as json_file:
        json.dump(language_srt_map, json_file)
    logging.debug("File updated.")

    return True
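A hedged usage sketch of update_json; the YouTube ID and the timestamp format passed as time_of_attempt are made up for illustration:

import datetime

# Hypothetical call after one subtitle download attempt.
update_json(
    youtube_id="abc123XYZ_w",                      # illustrative ID
    lang_code="pt-BR",                             # IETF format, per the docstring
    downloaded=True,
    api_response="success",
    time_of_attempt=str(datetime.datetime.now()),  # exact format is an assumption
)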
Example #6
 def process_request(self, request):
     next = request.GET.get("next", "")
     if next.startswith("/"):
         logging.debug("next='%s'" % next)
         request.next = next
     else:
         request.next = ""
Example #7
def generate_fake_coachreport_logs(password="******"):
    t,_ = FacilityUser.objects.get_or_create(
        facility=Facility.objects.all()[0],
        username=random.choice(firstnames)
    )
    t.set_password(password)

    # TODO: create flags later
    num_logs = 20
    logs = []
    for _ in xrange(num_logs):
        date_logged_in = datetime.datetime.now() - datetime.timedelta(days=random.randint(1,10))
        date_viewed_coachreport = date_logged_in + datetime.timedelta(minutes=random.randint(0, 30))
        date_logged_out = date_viewed_coachreport + datetime.timedelta(minutes=random.randint(0, 30))
        login_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("login"),
            start_datetime=date_logged_in,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_logged_out,
        )
        logging.info("created login log for teacher %s" % t.username)
        coachreport_log = UserLog.objects.create(
            user=t,
            activity_type=UserLog.get_activity_int("coachreport"),
            start_datetime=date_viewed_coachreport,
            last_active_datetime=date_viewed_coachreport,
            end_datetime=date_viewed_coachreport,
        )
        logs.append((login_log, coachreport_log))
        logging.info("created coachreport log for teacher %s" % t.username)
    return logs
Example #8
        def validate_times(srt_content, srt_issues):
            times = re.findall("([0-9:,]+) --> ([0-9:,]+)\r\n", srt_content, re.S | re.M)

            parse_time = lambda str: datetime.datetime.strptime(str, "%H:%M:%S,%f")
            for i in range(len(times)):
                try:
                    between_subtitle_time = datediff(
                        parse_time(times[i][0]), parse_time(times[i - 1][1] if i > 0 else "00:00:00,000")
                    )
                    within_subtitle_time = datediff(parse_time(times[i][1]), parse_time(times[i][0]))

                    if between_subtitle_time > 60.0:
                        srt_issues.append("Between-subtitle gap of %5.2f seconds" % between_subtitle_time)

                    if within_subtitle_time > 60.0:
                        srt_issues.append("Within-subtitle duration of %5.2f seconds" % within_subtitle_time)
                    elif within_subtitle_time == 0.0:
                        logging.debug("Subtitle flies by too fast (%s --> %s)." % times[i])

                    # print "Start: %s\tB: %5.2f\tW: %5.2f" % (parse_time(times[i][0]), between_subtitle_time, within_subtitle_time)
                except Exception as e:
                    if not times[i][1].startswith("99:59:59"):
                        srt_issues.append("Error checking times: %s" % e)
                    else:
                        if len(times) - i > 1 and len(times) - i - 1 > len(times) / 10.0:
                            if i == 0:
                                srt_issues.append("No subtitles have a valid starting point.")
                            else:
                                logging.debug(
                                    "Hit end of movie, but %d (of %d) subtitle(s) remain in the queue."
                                    % (len(times) - i - 1, len(times))
                                )
                        break
Example #9
    def recurse_nodes_to_extract_knowledge_map(node, node_cache):
        """
        Internal function for recursing the topic tree and building the knowledge map.
        Requires rebranding of metadata done by recurse_nodes function.
        """
        assert node["kind"] == "Topic"

        if node.get("in_knowledge_map", None):
            if node["slug"] not in knowledge_map["topics"]:
                logging.debug("Not in knowledge map: %s" % node["slug"])
                node["in_knowledge_map"] = False
                # Use a distinct loop variable so we don't clobber the outer `node`.
                for cached_node in node_cache["Topic"][node["slug"]]:
                    cached_node["in_knowledge_map"] = False

            knowledge_topics[node["slug"]] = topic_tools.get_all_leaves(node, leaf_type="Exercise")

            if not knowledge_topics[node["slug"]]:
                sys.stderr.write("Removing topic from topic tree: no exercises. %s" % node["slug"])
                del knowledge_topics[node["slug"]]
                del knowledge_map["topics"][node["slug"]]
                node["in_knowledge_map"] = False
                for cached_node in node_cache["Topic"][node["slug"]]:
                    cached_node["in_knowledge_map"] = False
        else:
            if node["slug"] in knowledge_map["topics"]:
                sys.stderr.write("Removing topic from topic tree; does not belong. '%s'" % node["slug"])
                logging.warn("Removing from knowledge map: %s" % node["slug"])
                del knowledge_map["topics"][node["slug"]]

        for child in [n for n in node.get("children", []) if n["kind"] == "Topic"]:
            recurse_nodes_to_extract_knowledge_map(child, node_cache)
Example #10
def update_metadata(package_metadata, version=VERSION):
    """
    We've zipped the packages, and now have unzipped & zipped sizes.
    Update this info in the local metadata (but not inside the zip)
    """
    master_filepath = get_language_pack_availability_filepath(version=version)
    master_metadata = softload_json(master_filepath, logger=logging.warn, errmsg="Error opening master language pack metadata")

    for lc, updated_meta in package_metadata.iteritems():
        lang_code_ietf = lcode_to_ietf(lc)

        # Gather existing metadata
        metadata_filepath = get_language_pack_metadata_filepath(lang_code_ietf, version=version)
        stored_meta = softload_json(metadata_filepath, logger=logging.warn, errmsg="Error opening %s language pack metadata" % lc)

        stored_meta.update(updated_meta)

        # Write locally (this is used on download by the distributed server to update its database)
        with open(metadata_filepath, 'w') as output:
            json.dump(stored_meta, output)

        # Update master (this is used for central server to handle API requests for data)
        master_metadata[lang_code_ietf] = stored_meta

    # Save updated master
    ensure_dir(os.path.dirname(master_filepath))
    with open(master_filepath, 'w') as output:
        json.dump(master_metadata, output)
    logging.info("Local record of translations updated")
Example #11
    def update_user_activity(cls, user, activity_type="login", update_datetime=None, language=None, suppress_save=False):
        """Helper function to update an existing user activity log entry."""

        # Do nothing if the max # of records is zero
        # (i.e. this functionality is disabled)
        if not cls.is_enabled():
            return

        if not user:
            raise ValidationError("A valid user must always be specified.")
        if not update_datetime:  # must be done outside the function header (else becomes static)
            update_datetime = datetime.now()
        activity_type = cls.get_activity_int(activity_type)

        cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
        if cur_log:
            # How could you start after you updated??
            if cur_log.start_datetime > update_datetime:
                raise ValidationError("Update time must always be later than the login time.")
        else:
            # No unstopped starts.  Start should have been called first!
            logging.warn("%s: Had to create a user log entry on an UPDATE(%d)! @ %s" % (user.username, activity_type, update_datetime))
            cur_log = cls.begin_user_activity(user=user, activity_type=activity_type, start_datetime=update_datetime, suppress_save=True)

        logging.debug("%s: UPDATE activity (%d) @ %s" % (user.username, activity_type, update_datetime))
        cur_log.last_active_datetime = update_datetime
        cur_log.language = language or cur_log.language  # set the language to the current language, if there is one.
        if not suppress_save:
            cur_log.save()
        return cur_log
Example #12
    def end_user_activity(cls, user, activity_type="login", end_datetime=None, suppress_save=False):  # don't accept language--we're just closing previous activity.
        """Helper function to complete an existing user activity log entry."""

        # Do nothing if the max # of records is zero
        # (i.e. this functionality is disabled)
        if not cls.is_enabled():
            return

        if not user:
            raise ValidationError("A valid user must always be specified.")
        if not end_datetime:  # must be done outside the function header (else becomes static)
            end_datetime = datetime.now()
        activity_type = cls.get_activity_int(activity_type)

        cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)

        if cur_log:
            # How could you start after you ended??
            if cur_log.start_datetime > end_datetime:
                raise ValidationError("Update time must always be later than the login time.")
        else:
            # No unstopped starts.  Start should have been called first!
            logging.warn("%s: Had to BEGIN a user log entry, but ENDING(%d)! @ %s" % (user.username, activity_type, end_datetime))
            cur_log = cls.begin_user_activity(user=user, activity_type=activity_type, start_datetime=end_datetime, suppress_save=True)

        logging.debug("%s: Logging LOGOUT activity @ %s" % (user.username, end_datetime))
        cur_log.end_datetime = end_datetime
        if not suppress_save:
            cur_log.save()  # total-seconds will be computed here.
        return cur_log
Example #13
    def begin_user_activity(cls, user, activity_type="login", start_datetime=None, language=None, suppress_save=False):
        """Helper function to create a user activity log entry."""

        # Do nothing if the max # of records is zero
        # (i.e. this functionality is disabled)
        if not cls.is_enabled():
            return

        if not user:
            raise ValidationError("A valid user must always be specified.")
        if not start_datetime:  # must be done outside the function header (else becomes static)
            start_datetime = datetime.now()
        activity_type = cls.get_activity_int(activity_type)

        cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
        if cur_log:
            # Seems we're logging in without logging out of the previous.
            #   Best thing to do is simulate a login
            #   at the previous last update time.
            #
            # Note: this can be a recursive call
            logging.warn("%s: had to END activity on a begin(%d) @ %s" % (user.username, activity_type, start_datetime))
            # Don't mark current language when closing an old one
            cls.end_user_activity(user=user, activity_type=activity_type, end_datetime=cur_log.last_active_datetime)  # can't suppress save
            cur_log = None

        # Create a new entry
        logging.debug("%s: BEGIN activity(%d) @ %s" % (user.username, activity_type, start_datetime))
        cur_log = cls(user=user, activity_type=activity_type, start_datetime=start_datetime, last_active_datetime=start_datetime, language=language)
        if not suppress_save:
            cur_log.save()

        return cur_log
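Taken together, Examples #11-#13 model one activity session. A hedged sketch of the intended call order (the user object and activity type are placeholders):

# begin -> update (possibly many times) -> end; end_user_activity's save()
# is what computes total_seconds, per the comment in Example #12.
UserLog.begin_user_activity(user, activity_type="login")
UserLog.update_user_activity(user, activity_type="login")   # e.g. on each request
UserLog.end_user_activity(user, activity_type="login")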
Example #14
    def __init__(self, comment=None, fixture=None, **kwargs):

        self.return_dict = {}
        self.return_dict['comment'] = comment
        self.return_dict['class'] = type(self).__name__
        self.return_dict['uname'] = platform.uname()
        self.return_dict['fixture'] = fixture
        try:
            self.verbosity = int(kwargs.get("verbosity"))
        except:
            self.verbosity = 1

        try:
            branch = subprocess.Popen(["git", "describe", "--contains", "--all", "HEAD"], stdout=subprocess.PIPE).communicate()[0]
            self.return_dict['branch'] = branch[:-1]
            head = subprocess.Popen(["git", "log", "--pretty=oneline", "--abbrev-commit", "--max-count=1"], stdout=subprocess.PIPE).communicate()[0]
            self.return_dict['head'] = head[:-1]
        except:
            self.return_dict['branch'] = None
            self.return_dict['head'] = None

        # if setup fails, what could we do?
        #   let the exception bubble up is the best.
        try:
            self._setup(**kwargs)
        except Exception as e:
            logging.debug("Failed setup (%s); trying to tear down" % e)
            try:
                self._teardown()
            except:
                pass
            raise e
Example #15
def select_best_available_language(target_code, available_codes=None):
    """
    Critical function for choosing the best available language for a resource,
    given a target language code.

    This is used by video and exercise pages, for example,
    to determine what file to serve, based on available resources
    and the current requested language.
    """

    # Scrub the input
    target_code = lcode_to_django_lang(target_code)
    if available_codes is None:
        available_codes = get_installed_language_packs().keys()
    available_codes = [lcode_to_django_lang(lc) for lc in available_codes]

    # Hierarchy of language selection
    if target_code in available_codes:
        actual_code = target_code
    elif target_code.split("-", 1)[0] in available_codes:
        actual_code = target_code.split("-", 1)[0]
    elif settings.LANGUAGE_CODE in available_codes:
        actual_code = settings.LANGUAGE_CODE
    elif "en" in available_codes:
        actual_code = "en"
    elif available_codes:
        actual_code = available_codes[0]
    else:
        actual_code = None

    if actual_code != target_code:
        logging.debug("Requested code %s, got code %s" % (target_code, actual_code))
    return actual_code
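Two illustrative calls showing the fallback ladder (the available codes are made up, and assume lcode_to_django_lang leaves plain two-letter codes unchanged):

select_best_available_language("pt-BR", available_codes=["pt", "es", "en"])  # base-language match -> "pt"
select_best_available_language("sw", available_codes=[])                     # nothing installed -> None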
Example #16
    def handle(self, *args, **options):

        if len(args) == 1 and args[0] == "test":
            # Callback for "weak" test--checks at least that the django project compiles (local_settings is OK)
            sys.stdout.write("Success!\n")
            sys.exit(0)

        try:
            if options.get("branch", None):
                # Specified a repo
                self.update_via_git(**options)

            elif options.get("zip_file", None):
                # Specified a file
                if not os.path.exists(options.get("zip_file")):
                    raise CommandError("Specified zip file does not exist: %s" % options.get("zip_file"))
                self.update_via_zip(**options)

            elif options.get("url", None):
                self.update_via_zip(**options)

            elif os.path.exists(settings.PROJECT_PATH + "/../.git"):
                # If we detect a git repo, try git
                if len(args) == 1 and not options["branch"]:
                    options["branch"] = args[0]
                elif len(args) != 0:
                    raise CommandError("Specified too many command-line arguments")
                self.update_via_git(**options)

            elif len(args) > 1:
                raise CommandError("Too many command-line arguments.")

            elif len(args) == 1:
                # Specify zip via first command-line arg
                if options['zip_file'] is not None:
                    raise CommandError("Cannot specify a zipfile as unnamed and named command-line arguments at the same time.")
                options['zip_file'] = args[0]
                self.update_via_zip(**options)

            else:
                # No params, no git repo: try to get a file online.
                zip_file = tempfile.mkstemp()[1]
                for url in ["http://%s/api/download/kalite/latest/%s/%s/" % (settings.CENTRAL_SERVER_HOST, platform.system().lower(), "en")]:
                    logging.info("Downloading repo snapshot from %s to %s" % (url, zip_file))
                    try:
                        urllib.urlretrieve(url, zip_file)
                        sys.stdout.write("success @ %s\n" % url)
                        break
                    except Exception as e:
                        logging.debug("Failed to get zipfile from %s: %s" % (url, e))
                        continue
                options["zip_file"] = zip_file
                self.update_via_zip(**options)

        except Exception as e:
            if self.started() and not self.ended():
                self.cancel(stage_status="error", notes=unicode(e))
            raise

        assert self.ended(), "Subroutines should complete() if they start()!"
Example #17
    def recurse_nodes_to_clean_related_videos(node):
        """
        Internal function for recursing the topic tree and marking related exercises.
        Requires rebranding of metadata done by recurse_nodes function.
        """
        def get_video_node(video_slug, node):
            if node["kind"] == "Topic":
                for child in node.get("children", []):
                    video_node = get_video_node(video_slug, child)
                    if video_node:
                        return video_node
            elif node["kind"] == "Video" and node["slug"] == video_slug:
                return node

            return None

        if node["kind"] == "Exercise":
            videos_to_delete = []
            for vi, video_slug in enumerate(node["related_video_slugs"]):
                if not get_video_node(video_slug, topic_tree):
                    videos_to_delete.append(vi)
            for vi in reversed(videos_to_delete):
                logging.warn("Deleting unknown video %s" % node["related_video_slugs"][vi])
                del node["related_video_slugs"][vi]
        for child in node.get("children", []):
            recurse_nodes_to_clean_related_videos(child)
Example #18
def get_language_pack(lang_code, software_version, callback):
    """Download language pack for specified language"""

    lang_code = lcode_to_ietf(lang_code)
    logging.info("Retrieving language pack: %s" % lang_code)
    request_url = get_language_pack_url(lang_code, software_version)
    path, response = download_file(request_url, callback=callback_percent_proxy(callback))
    return path
Example #19
def invalidate_on_video_delete(sender, **kwargs):
    """
    Listen in to see when available videos become unavailable.
    """
    was_available = kwargs["instance"] and kwargs["instance"].percent_complete == 100
    if was_available:
        logging.debug("Invalidating cache on delete for %s" % kwargs["instance"])
        invalidate_all_caches()
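This is presumably registered as a delete-signal receiver for the video-file model; the sender name (VideoFile) and the choice of pre_delete are assumptions:

from django.db.models.signals import pre_delete

pre_delete.connect(invalidate_on_video_delete, sender=VideoFile)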
Example #20
def update_templates():
    """Update template po files"""
    pot_path = os.path.join(settings.DATA_PATH, "i18n", "pot")
    logging.info("Copying english po files to %s" % pot_path)

    #  post them to exposed URL
    ensure_dir(pot_path)
    shutil.copy(os.path.join(settings.LOCALE_PATHS[0], "en/LC_MESSAGES/django.po"), os.path.join(pot_path, "kalite.pot"))
    shutil.copy(os.path.join(settings.LOCALE_PATHS[0], "en/LC_MESSAGES/djangojs.po"), os.path.join(pot_path, "kalitejs.pot"))
Example #21
def stamp_availability_on_topic(topic, videos_path=settings.CONTENT_ROOT, force=True, stamp_urls=True, update_counts_question_mark=None):
    """ Uses the (json) topic tree to query the django database for which video files exist

    Returns the original topic dictionary, with these properties added to each NON-LEAF node:
      * nvideos_known: The # of videos in and under that node, that are known (i.e. in the Khan Academy library)
      * nvideos_local: The # of videos in and under that node, that were actually downloaded and available locally
      * nvideos_available: nvideos_local, or nvideos_known when a backup video source is configured
    And the following property for leaf nodes:
      * on_disk

    Input Parameters:
    * videos_path: the path to video files
    """
    if update_counts_question_mark is None:
        update_counts_question_mark = do_video_counts_need_update_question_mark()


    if not force and "nvideos_local" in topic:
        return (topic, topic["nvideos_local"], topic["nvideos_known"], False)

    nvideos_local = 0
    nvideos_known = 0

    # Can't deal with leaves
    assert topic["kind"] == "Topic", "Should not be calling this function on leaves; it's inefficient!"

    # Only look for videos if there are more branches
    if len(topic["children"]) == 0:
        logging.debug("no children: %s" % topic["path"])

    for child in topic["children"]:
        # RECURSIVE CALL:
        #  The children have children, let them figure things out themselves
        if "children" in child:
            if not force and "nvideos_local" in child:
                continue
            stamp_availability_on_topic(topic=child, videos_path=videos_path, force=force, stamp_urls=stamp_urls, update_counts_question_mark=update_counts_question_mark)
            nvideos_local += child["nvideos_local"]
            nvideos_known += child["nvideos_known"]

    # BASE CASE:
    # All my children are leaves, so we'll query here (a bit more efficient than 1 query per leaf)
    videos = get_videos(topic)
    for video in videos:
        if force or update_counts_question_mark or "availability" not in video:
            stamp_availability_on_video(video, force=force, stamp_urls=stamp_urls, videos_path=videos_path)
        nvideos_local += int(video["on_disk"])
    nvideos_known += len(videos)
    nvideos_available = nvideos_local if not settings.BACKUP_VIDEO_SOURCE else nvideos_known

    changed = "nvideos_local" in topic and topic["nvideos_local"] != nvideos_local
    changed = changed or ("nvideos_known" in topic and topic["nvideos_known"] != nvideos_known)
    topic["nvideos_local"] = nvideos_local
    topic["nvideos_known"] = nvideos_known
    topic["nvideos_available"] = nvideos_available
    topic["available"] = bool(nvideos_local) or bool(settings.BACKUP_VIDEO_SOURCE)

    return (topic, nvideos_local, nvideos_known, nvideos_available, changed)
Example #22
def run_makemessages():
    """Run makemessages command for english po files"""
    logging.info("Executing makemessages command")
    # Generate english po file
    ignore_pattern = ['python-packages/*'] + ['kalite/%s/*' % dirname for dirname in ['central', 'contact', 'faq', 'registration', 'tests', 'stats']]
    call_command('makemessages', locale='en', ignore_patterns=ignore_pattern, no_obsolete=True)
    # Generate english po file for javascript
    ignore_pattern = ['kalite/static/admin/js/*', 'python-packages/*', 'kalite/static/js/i18n/*', 'kalite/static/js/khan-exercises/*']
    call_command('makemessages', domain='djangojs', locale='en', ignore_patterns=ignore_pattern, no_obsolete=True)
Example #23
def invalidate_all_caches():
    """
    Basic entry-point for clearing necessary caches.  Most functions can
    call in here.
    """
    if caching_is_enabled():
        invalidate_inmemory_caches()
        invalidate_web_cache()
        logging.debug("Great success emptying all caches.")
Example #24
def write_count_to_json(subtitle_counts, data_path):
    """Write JSON to file in static/data/subtitles/"""
    current_counts = softload_json(SUBTITLE_COUNTS_FILEPATH, logger=logging.error)
    current_counts.update(subtitle_counts)

    logging.debug("Writing fresh srt counts to %s" % SUBTITLE_COUNTS_FILEPATH)
    with open(SUBTITLE_COUNTS_FILEPATH, 'wb') as fp:
        # sort here, so we don't have to sort later when serving to clients
        json.dump(current_counts, fp, sort_keys=True)
Example #25
def get_file2id_map(force=False):
    global YT2ID_MAP
    if YT2ID_MAP is None or force:
        YT2ID_MAP = {}
        for lang_code, dic in get_dubbed_video_map().iteritems():
            for english_youtube_id, dubbed_youtube_id in dic.iteritems():
                if dubbed_youtube_id in YT2ID_MAP:
                    logging.debug("conflicting entry of dubbed_youtube_id %s in %s dubbed video map" % (dubbed_youtube_id, lang_code))
                YT2ID_MAP[dubbed_youtube_id] = english_youtube_id  # assumes video id is the english youtube_id
    return YT2ID_MAP
Example #26
def save_topic_tree(topic_tree=None, node_cache=None, data_path=os.path.join(settings.PROJECT_PATH, "static", "data")):
    assert bool(topic_tree) + bool(node_cache) == 1, "Must specify either topic_tree or node_cache parameter"

    # Dump the topic tree (again)
    topic_tree = topic_tree or node_cache["Topic"]["root"][0]

    dest_filepath = os.path.join(data_path, topic_tools.topics_file)
    logging.debug("Saving topic tree to %s" % dest_filepath)
    with open(dest_filepath, "w") as fp:
        fp.write(json.dumps(topic_tree, indent=2))
Example #27
def unpack_language(lang_code, zip_filepath=None, zip_fp=None, zip_data=None):
    """Unpack zipped language pack into locale directory"""
    lang_code = lcode_to_django_dir(lang_code)

    logging.info("Unpacking new translations")
    ensure_dir(os.path.join(LOCALE_ROOT, lang_code, "LC_MESSAGES"))

    ## Unpack into temp dir
    z = zipfile.ZipFile(zip_fp or (StringIO(zip_data) if zip_data else open(zip_filepath, "rb")))
    z.extractall(os.path.join(LOCALE_ROOT, lang_code))
Example #28
def get_exercise_page_paths(video_id=None):

    try:
        exercise_paths = set()
        for exercise in topic_tools.get_related_exercises(videos=topic_tools.get_node_cache("Video")[video_id]):
            exercise_paths = exercise_paths.union(set([exercise["path"]]))
        return list(exercise_paths)
    except Exception as e:
        logging.debug("Exception while getting exercise paths: %s" % e)
        return []
Example #29
    def cancel_progress(self, stage_status=None, notes=None):
        """
        Stamps end time.
        """
        logging.info("Cancelling process %s" % (self.process_name))

        self.stage_status = stage_status or "cancelled"
        self.end_time = datetime.datetime.now()
        self.completed = False
        self.notes = notes
        self.save()
Example #30
 def wait_until_starttime(self, starttime):
     time_to_sleep = (self.random.random() * 10.0) + 10
     if self.verbosity >= 1:
         print("(" + str(self.behavior_profile-24601) + ") waiting until it's time to start (%.1fs)." % time_to_sleep)
     time.sleep(time_to_sleep) #sleep
     now = datetime.datetime.today()
     if now.hour >= int(starttime[:2]):
         if now.minute >= int(starttime[-2:]):
             return False
     logging.debug("Go!")
     return True