def process_request(self, request):
    next = request.GET.get("next", "")
    if next.startswith("/"):
        logging.debug("next='%s'" % next)
        request.next = next
    else:
        request.next = ""

def update_json(youtube_id, lang_code, downloaded, api_response, time_of_attempt):
    """Update language_srt_map to reflect download status

    lang_code in IETF format
    """
    # Open JSON file
    filepath = get_lang_map_filepath(lang_code)
    language_srt_map = softload_json(filepath, logger=logging.error)
    if not language_srt_map:
        return False

    # Create or update the entry for this video
    entry = language_srt_map.get(youtube_id, {})
    entry["downloaded"] = downloaded
    entry["api_response"] = api_response
    entry["last_attempt"] = time_of_attempt
    if api_response == "success":
        entry["last_success"] = time_of_attempt

    # update the full-size JSON with the new information
    language_srt_map[youtube_id] = entry

    # write it back to file
    with open(filepath, "w") as json_file:
        json_file.write(json.dumps(language_srt_map))
    logging.debug("File updated.")

    return True
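
# The read-modify-write pattern above is easy to reproduce standalone.  A minimal
# sketch of the same flow (hypothetical filepath and entry shape, stdlib only --
# not the project's softload_json helper):
import json
import logging

def update_entry(filepath, key, **fields):
    """Load a JSON map, update one entry, and write the whole map back."""
    try:
        with open(filepath, "r") as fp:
            data = json.load(fp)
    except (IOError, ValueError) as e:
        logging.error("Could not load %s: %s" % (filepath, e))
        return False
    data.setdefault(key, {}).update(fields)
    with open(filepath, "w") as fp:
        json.dump(data, fp)
    return True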
Example #3
def add_to_summary(sender, **kwargs):
    assert UserLog.is_enabled(), "We shouldn't be saving unless UserLog is enabled."

    instance = kwargs["instance"]

    if not instance.start_datetime:
        raise ValidationError("start_datetime cannot be None")
    if instance.last_active_datetime and instance.start_datetime > instance.last_active_datetime:
        raise ValidationError("UserLog date consistency check for start_datetime and last_active_datetime")

    if instance.end_datetime and not instance.total_seconds:
        # Compute total_seconds, save to summary
        #   Note: only supports setting end_datetime once!
        instance.full_clean()

        # The top computation is more lenient: user activity is just time logged in, literally.
        # The bottom computation is more strict: user activity is from start until the last "action"
        #   recorded--in the current case, that means from login until the last moment an exercise or
        #   video log was updated.
        #instance.total_seconds = datediff(instance.end_datetime, instance.start_datetime, units="seconds")
        instance.total_seconds = 0 if not instance.last_active_datetime else datediff(instance.last_active_datetime, instance.start_datetime, units="seconds")

        # Confirm the result (output info first for easier debugging)
        if instance.total_seconds < 0:
            raise ValidationError("Total learning time should always be non-negative.")
        logging.debug("%s: total time (%d): %d seconds" % (instance.user.username, instance.activity_type, instance.total_seconds))

        # Save only completed log items to the UserLogSummary
        UserLogSummary.add_log_to_summary(instance)
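
# The datediff(..., units="seconds") call is project-specific; with the standard
# library alone, the same duration computation is roughly this (a sketch, not the
# project's helper):
from datetime import datetime

def seconds_between(start, end):
    """Span between two datetimes, in (possibly fractional) seconds."""
    return (end - start).total_seconds()

start = datetime(2013, 5, 1, 9, 0, 0)
last_active = datetime(2013, 5, 1, 9, 42, 30)
total_seconds = seconds_between(start, last_active)
assert total_seconds >= 0, "Total learning time should always be non-negative."
print("%d seconds" % total_seconds)  # 2550 seconds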
    def recurse_nodes_to_extract_knowledge_map(node, node_cache):
        """
        Internal function for recursing the topic tree and building the knowledge map.
        Requires rebranding of metadata done by recurse_nodes function.
        """
        assert node["kind"] == "Topic"

        if node.get("in_knowledge_map", None):
            if node["slug"] not in knowledge_map["topics"]:
                logging.debug("Not in knowledge map: %s" % node["slug"])
                node["in_knowledge_map"] = False
                for cached_node in node_cache["Topic"][node["slug"]]:
                    cached_node["in_knowledge_map"] = False

            knowledge_topics[node["slug"]] = topic_tools.get_all_leaves(node, leaf_type="Exercise")

            if not knowledge_topics[node["slug"]]:
                sys.stderr.write("Removing topic from topic tree: no exercises. %s" % node["slug"])
                del knowledge_topics[node["slug"]]
                del knowledge_map["topics"][node["slug"]]
                node["in_knowledge_map"] = False
                for cached_node in node_cache["Topic"][node["slug"]]:
                    cached_node["in_knowledge_map"] = False
        else:
            if node["slug"] in knowledge_map["topics"]:
                sys.stderr.write("Removing topic from topic tree; does not belong. '%s'" % node["slug"])
                logging.warn("Removing from knowledge map: %s" % node["slug"])
                del knowledge_map["topics"][node["slug"]]

        for child in [n for n in node.get("children", []) if n["kind"] == "Topic"]:
            recurse_nodes_to_extract_knowledge_map(child, node_cache)
Example #5
    def update_user_activity(cls, user, activity_type="login", update_datetime=None, language=None, suppress_save=False):
        """Helper function to update an existing user activity log entry."""

        # Do nothing if the max # of records is zero
        # (i.e. this functionality is disabled)
        if not cls.is_enabled():
            return

        if not user:
            raise ValidationError("A valid user must always be specified.")
        if not update_datetime:  # must be done outside the function header (else becomes static)
            update_datetime = datetime.now()
        activity_type = cls.get_activity_int(activity_type)

        cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
        if cur_log:
            # How could you start after you updated??
            if cur_log.start_datetime > update_datetime:
                raise ValidationError("Update time must always be later than the login time.")
        else:
            # No unstopped starts.  Start should have been called first!
            logging.warn("%s: Had to create a user log entry on an UPDATE(%d)! @ %s" % (user.username, activity_type, update_datetime))
            cur_log = cls.begin_user_activity(user=user, activity_type=activity_type, start_datetime=update_datetime, suppress_save=True)

        logging.debug("%s: UPDATE activity (%d) @ %s" % (user.username, activity_type, update_datetime))
        cur_log.last_active_datetime = update_datetime
        cur_log.language = language or cur_log.language  # set the language to the current language, if there is one.
        if not suppress_save:
            cur_log.save()
        return cur_log
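
# The "if not update_datetime: update_datetime = datetime.now()" idiom above exists
# because default argument values are evaluated once, at function definition time.
# A standalone illustration of the pitfall being avoided:
import time
from datetime import datetime

def bad(ts=datetime.now()):  # default evaluated once, when `def` runs
    return ts

def good(ts=None):  # sentinel checked on every call
    if ts is None:
        ts = datetime.now()
    return ts

first = bad()
time.sleep(0.01)
assert bad() == first    # stale: the same timestamp on every call
assert good() > first    # fresh timestamp each call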
Example #6
    def end_user_activity(cls, user, activity_type="login", end_datetime=None, suppress_save=False):  # don't accept language--we're just closing previous activity.
        """Helper function to complete an existing user activity log entry."""

        # Do nothing if the max # of records is zero
        # (i.e. this functionality is disabled)
        if not cls.is_enabled():
            return

        if not user:
            raise ValidationError("A valid user must always be specified.")
        if not end_datetime:  # must be done outside the function header (else becomes static)
            end_datetime = datetime.now()
        activity_type = cls.get_activity_int(activity_type)

        cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)

        if cur_log:
            # How could you start after you ended??
            if cur_log.start_datetime > end_datetime:
                raise ValidationError("Update time must always be later than the login time.")
        else:
            # No unstopped starts.  Start should have been called first!
            logging.warn("%s: Had to BEGIN a user log entry, but ENDING(%d)! @ %s" % (user.username, activity_type, end_datetime))
            cur_log = cls.begin_user_activity(user=user, activity_type=activity_type, start_datetime=end_datetime, suppress_save=True)

        logging.debug("%s: Logging LOGOUT activity @ %s" % (user.username, end_datetime))
        cur_log.end_datetime = end_datetime
        if not suppress_save:
            cur_log.save()  # total-seconds will be computed here.
        return cur_log
Example #7
    def __init__(self, comment=None, fixture=None, **kwargs):

        self.return_dict = {}
        self.return_dict['comment'] = comment
        self.return_dict['class'] = type(self).__name__
        self.return_dict['uname'] = platform.uname()
        self.return_dict['fixture'] = fixture
        try:
            self.verbosity = int(kwargs.get("verbosity"))
        except (TypeError, ValueError):
            self.verbosity = 1

        try:
            branch = subprocess.Popen(["git", "describe", "--contains", "--all", "HEAD"], stdout=subprocess.PIPE).communicate()[0]
            self.return_dict['branch'] = branch.strip()
            head = subprocess.Popen(["git", "log", "--pretty=oneline", "--abbrev-commit", "--max-count=1"], stdout=subprocess.PIPE).communicate()[0]
            self.return_dict['head'] = head.strip()
        except Exception:  # e.g. git not installed, or not a git checkout
            self.return_dict['branch'] = None
            self.return_dict['head'] = None

        # If setup fails, the best we can do is attempt a teardown,
        # then let the exception bubble up.
        try:
            self._setup(**kwargs)
        except Exception as e:
            logging.debug("Failed setup (%s); trying to tear down" % e)
            try:
                self._teardown()
            except Exception:
                pass
            raise
Example #8
    def begin_user_activity(cls, user, activity_type="login", start_datetime=None, language=None, suppress_save=False):
        """Helper function to create a user activity log entry."""

        # Do nothing if the max # of records is zero
        # (i.e. this functionality is disabled)
        if not cls.is_enabled():
            return

        if not user:
            raise ValidationError("A valid user must always be specified.")
        if not start_datetime:  # must be done outside the function header (else becomes static)
            start_datetime = datetime.now()
        activity_type = cls.get_activity_int(activity_type)

        cur_log = cls.get_latest_open_log_or_None(user=user, activity_type=activity_type)
        if cur_log:
            # Seems we're logging in without logging out of the previous.
            #   Best thing to do is simulate a login
            #   at the previous last update time.
            #
            # Note: this can be a recursive call
            logging.warn("%s: had to END activity on a begin(%d) @ %s" % (user.username, activity_type, start_datetime))
            # Don't mark current language when closing an old one
            cls.end_user_activity(user=user, activity_type=activity_type, end_datetime=cur_log.last_active_datetime)  # can't suppress save
            cur_log = None

        # Create a new entry
        logging.debug("%s: BEGIN activity(%d) @ %s" % (user.username, activity_type, start_datetime))
        cur_log = cls(user=user, activity_type=activity_type, start_datetime=start_datetime, last_active_datetime=start_datetime, language=language)
        if not suppress_save:
            cur_log.save()

        return cur_log

        def validate_times(srt_content, srt_issues):
            times = re.findall(r"([0-9:,]+) --> ([0-9:,]+)\r\n", srt_content, re.S | re.M)

            parse_time = lambda s: datetime.datetime.strptime(s, "%H:%M:%S,%f")
            for i in range(len(times)):
                try:
                    between_subtitle_time = datediff(
                        parse_time(times[i][0]), parse_time(times[i - 1][1] if i > 0 else "00:00:00,000")
                    )
                    within_subtitle_time = datediff(parse_time(times[i][1]), parse_time(times[i][0]))

                    if between_subtitle_time > 60.0:
                        srt_issues.append("Between-subtitle gap of %5.2f seconds" % between_subtitle_time)

                    if within_subtitle_time > 60.0:
                        srt_issues.append("Within-subtitle duration of %5.2f seconds" % within_subtitle_time)
                    elif within_subtitle_time == 0.0:
                        logging.debug("Subtitle flies by too fast (%s --> %s)." % times[i])

                    # print "Start: %s\tB: %5.2f\tW: %5.2f" % (parse_time(times[i][0]), between_subtitle_time, within_subtitle_time)
                except Exception as e:
                    if not times[i][1].startswith("99:59:59"):
                        srt_issues.append("Error checking times: %s" % e)
                    else:
                        if len(times) - i > 1 and len(times) - i - 1 > len(times) / 10.0:
                            if i == 0:
                                srt_issues.append("No subtitles have a valid starting point.")
                            else:
                                logging.debug(
                                    "Hit end of movie, but %d (of %d) subtitle(s) remain in the queue."
                                    % (len(times) - i - 1, len(times))
                                )
                        break
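
# The timestamp handling in validate_times works standalone with just the standard
# library; a minimal sketch on a fabricated cue (plain datetime subtraction stands
# in for the project's datediff helper):
import datetime
import re

srt_content = "1\n00:00:01,000 --> 00:00:04,500\nHello, world.\n\n"
times = re.findall(r"([0-9:,]+) --> ([0-9:,]+)", srt_content)

def parse_time(s):
    return datetime.datetime.strptime(s, "%H:%M:%S,%f")

start, end = (parse_time(t) for t in times[0])
duration = (end - start).total_seconds()
print("cue lasts %.2f seconds" % duration)  # 3.50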
Example #10
def select_best_available_language(target_code, available_codes=None):
    """
    Critical function for choosing the best available language for a resource,
    given a target language code.

    This is used by video and exercise pages, for example,
    to determine what file to serve, based on available resources
    and the current requested language.
    """

    # Scrub the input
    target_code = lcode_to_django_lang(target_code)
    if available_codes is None:
        available_codes = get_installed_language_packs().keys()
    available_codes = [lcode_to_django_lang(lc) for lc in available_codes]

    # Hierarchy of language selection
    if target_code in available_codes:
        actual_code = target_code
    elif target_code.split("-", 1)[0] in available_codes:
        actual_code = target_code.split("-", 1)[0]
    elif settings.LANGUAGE_CODE in available_codes:
        actual_code = settings.LANGUAGE_CODE
    elif "en" in available_codes:
        actual_code = "en"
    elif available_codes:
        actual_code = available_codes[0]
    else:
        actual_code = None

    if actual_code != target_code:
        logging.debug("Requested code %s, got code %s" % (target_code, actual_code))
    return actual_code
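
# Stripped of the Django helpers, the selection hierarchy above reduces to a small
# pure function; a sketch with made-up codes:
def best_language(target, available, default="en"):
    """Pick the closest match: exact, base language, site default, English, anything."""
    base = target.split("-", 1)[0]
    for candidate in (target, base, default, "en"):
        if candidate in available:
            return candidate
    return available[0] if available else None

print(best_language("pt-BR", ["pt", "es", "en"]))        # "pt" (base-language fallback)
print(best_language("sw", ["es", "fr"], default="es"))   # "es" (site-default fallback)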
def generate_test_files():
    """Insert asterisks as translations in po files"""

    # Open them up and insert asterisks for all empty msgstrs
    logging.info("Generating test po files")
    en_po_dir = os.path.join(settings.LOCALE_PATHS[0], "en/LC_MESSAGES/")
    for po_file in glob.glob(os.path.join(en_po_dir, "*.po")):

        msgid_pattern = re.compile(r'msgid "(.*)"\nmsgstr', re.S | re.M)

        # glob already returns the full path, so open it directly
        with open(po_file, 'r') as content_file:
            content = content_file.read()
        results = content.split("\n\n")
        with open(os.path.join(en_po_dir, "tmp.po"), 'w') as temp_file:
            # We know the first block is static, so just dump that.
            temp_file.write(results[0])

            # Now work through actual translations
            for result in results[1:]:
                try:
                    msgid = re.findall(msgid_pattern, result)[0]

                    temp_file.write("\n\n")
                    temp_file.write(result.replace("msgstr \"\"", "msgstr \"***%s***\"" % msgid))
                except Exception as e:
                    logging.error("Failed to insert test string: %s\n\n%s\n\n" % (e, result))

        # Once done replacing, rename temp file to overwrite original
        os.rename(os.path.join(en_po_dir, "tmp.po"), po_file)

    # Compile once, after all files have been rewritten
    (out, err, rc) = compile_po_files("en")
    if err:
        logging.debug("Error executing compilemessages: %s" % err)
Example #12
    def handle(self, *args, **options):

        if len(args) == 1 and args[0] == "test":
            # Callback for "weak" test--checks at least that the django project compiles (local_settings is OK)
            sys.stdout.write("Success!\n")
            exit(0)

        try:
            if options.get("branch", None):
                # Specified a repo
                self.update_via_git(**options)

            elif options.get("zip_file", None):
                # Specified a file
                if not os.path.exists(options.get("zip_file")):
                    raise CommandError("Specified zip file does not exist: %s" % options.get("zip_file"))
                self.update_via_zip(**options)

            elif options.get("url", None):
                self.update_via_zip(**options)

            elif os.path.exists(settings.PROJECT_PATH + "/../.git"):
                # If we detect a git repo, try git
                if len(args) == 1 and not options["branch"]:
                    options["branch"] = args[0]
                elif len(args) != 0:
                    raise CommandError("Specified too many command-line arguments")
                self.update_via_git(**options)

            elif len(args) > 1:
                raise CommandError("Too many command-line arguments.")

            elif len(args) == 1:
                # Specify zip via first command-line arg
                if options['zip_file'] is not None:
                    raise CommandError("Cannot specify a zipfile as unnamed and named command-line arguments at the same time.")
                options['zip_file'] = args[0]
                self.update_via_zip(**options)

            else:
                # No params, no git repo: try to get a file online.
                zip_file = tempfile.mkstemp()[1]
                for url in ["http://%s/api/download/kalite/latest/%s/%s/" % (settings.CENTRAL_SERVER_HOST, platform.system().lower(), "en")]:
                    logging.info("Downloading repo snapshot from %s to %s" % (url, zip_file))
                    try:
                        urllib.urlretrieve(url, zip_file)
                        sys.stdout.write("success @ %s\n" % url)
                        break
                    except Exception as e:
                        logging.debug("Failed to get zipfile from %s: %s" % (url, e))
                        continue
                options["zip_file"] = zip_file
                self.update_via_zip(**options)

        except Exception as e:
            if self.started() and not self.ended():
                self.cancel(stage_status="error", notes=unicode(e))
            raise

        assert self.ended(), "Subroutines should complete() if they start()!"
Example #13
def invalidate_on_video_delete(sender, **kwargs):
    """
    Listen in to see when available videos become unavailable.
    """
    was_available = kwargs["instance"] and kwargs["instance"].percent_complete == 100
    if was_available:
        logging.debug("Invalidating cache on delete for %s" % kwargs["instance"])
        invalidate_all_caches()
Example #14
def stamp_availability_on_topic(topic, videos_path=settings.CONTENT_ROOT, force=True, stamp_urls=True, update_counts_question_mark=None):
    """ Uses the (json) topic tree to query the django database for which video files exist

    Returns the original topic dictionary, with these properties added to each NON-LEAF node:
      * nvideos_known: The # of videos in and under that node that are known (i.e. in the Khan Academy library)
      * nvideos_local: The # of videos in and under that node that were actually downloaded and are available locally
      * nvideos_available: nvideos_local, or nvideos_known when a backup video source is configured
    And the following property for leaf nodes:
      * on_disk

    Input Parameters:
    * videos_path: the path to video files
    """
    if update_counts_question_mark is None:
        update_counts_question_mark = do_video_counts_need_update_question_mark()

    if not force and "nvideos_local" in topic:
        return (topic, topic["nvideos_local"], topic["nvideos_known"], topic["nvideos_available"], False)

    nvideos_local = 0
    nvideos_known = 0

    # Can't deal with leaves
    assert topic["kind"] == "Topic", "Should not be calling this function on leaves; it's inefficient!"

    # Only look for videos if there are more branches
    if len(topic["children"]) == 0:
        logging.debug("no children: %s" % topic["path"])

    for child in topic["children"]:
        # RECURSIVE CALL:
        #  The children have children, let them figure things out themselves
        if "children" in child:
            if not force and "nvideos_local" in child:
                continue
            stamp_availability_on_topic(topic=child, videos_path=videos_path, force=force, stamp_urls=stamp_urls, update_counts_question_mark=update_counts_question_mark)
            nvideos_local += child["nvideos_local"]
            nvideos_known += child["nvideos_known"]

    # BASE CASE:
    # All my children are leaves, so we'll query here (a bit more efficient than 1 query per leaf)
    videos = get_videos(topic)
    for video in videos:
        if force or update_counts_question_mark or "availability" not in video:
            stamp_availability_on_video(video, force=force, stamp_urls=stamp_urls, videos_path=videos_path)
        nvideos_local += int(video["on_disk"])
    nvideos_known += len(videos)
    nvideos_available = nvideos_local if not settings.BACKUP_VIDEO_SOURCE else nvideos_known

    changed = "nvideos_local" in topic and topic["nvideos_local"] != nvideos_local
    changed = changed or ("nvideos_known" in topic and topic["nvideos_known"] != nvideos_known)
    topic["nvideos_local"] = nvideos_local
    topic["nvideos_known"] = nvideos_known
    topic["nvideos_available"] = nvideos_available
    topic["available"] = bool(nvideos_local) or bool(settings.BACKUP_VIDEO_SOURCE)

    return (topic, nvideos_local, nvideos_known, nvideos_available, changed)
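
# The recursion above aggregates leaf counts bottom-up, caching the result on each
# node.  The same pattern on a toy tree (simplified node shape, not the real topic
# schema):
def count_local(node):
    """Recursively stamp each Topic node with the number of on-disk leaves below it."""
    if node.get("kind") != "Topic":
        return int(node.get("on_disk", False))
    total = sum(count_local(child) for child in node.get("children", []))
    node["nvideos_local"] = total  # cache on the node, as stamp_availability_on_topic does
    return total

tree = {"kind": "Topic", "children": [
    {"kind": "Video", "on_disk": True},
    {"kind": "Topic", "children": [{"kind": "Video", "on_disk": False}]},
]}
print(count_local(tree))  # 1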
def update_all_distributed_callback(request):
    """
    Receive exercise and video logs uploaded from a distributed server,
    and save them for the given user.
    """

    if request.method != "POST":
        raise PermissionDenied("Only POST allowed to this URL endpoint.")

    videos = json.loads(request.POST["video_logs"])
    exercises = json.loads(request.POST["exercise_logs"])
    user = FacilityUser.objects.get(id=request.POST["user_id"])
    node_cache = get_node_cache()
    # Save videos
    n_videos_uploaded = 0
    for video in videos:
        video_id = video['video_id']
        youtube_id = video['youtube_id']

        # Only save video logs for videos that we recognize.
        if video_id not in node_cache["Video"]:
            logging.warn("Skipping unknown video %s" % video_id)
            continue

        try:
            (vl, _) = VideoLog.get_or_initialize(user=user, video_id=video_id, youtube_id=youtube_id)
            for key, val in video.iteritems():
                setattr(vl, key, val)
            logging.debug("Saving video log for %s: %s" % (video_id, vl))
            vl.save()
            n_videos_uploaded += 1
        except KeyError:
            logging.error("Could not save video log for data with missing values: %s" % video)
        except Exception as e:
            error_message = "Unexpected error importing videos: %s" % e
            return JsonResponseMessageError(error_message)

    # Save exercises
    n_exercises_uploaded = 0
    for exercise in exercises:
        # Only save exercise logs for exercises that we recognize.
        if exercise['exercise_id'] not in node_cache['Exercise']:
            logging.warn("Skipping unknown exercise %s" % exercise['exercise_id'])
            continue

        try:
            (el, _) = ExerciseLog.get_or_initialize(user=user, exercise_id=exercise["exercise_id"])
            for key, val in exercise.iteritems():
                setattr(el, key, val)
            logging.debug("Saving exercise log for %s: %s" % (exercise['exercise_id'], el))
            el.save()
            n_exercises_uploaded += 1
        except KeyError:
            logging.error("Could not save exercise log for data with missing values: %s" % exercise)
        except Exception as e:
            error_message = "Unexpected error importing exercises: %s" % e
            return JsonResponseMessageError(error_message)

    return JsonResponse({"success": "Uploaded %d exercises and %d videos" % (n_exercises_uploaded, n_videos_uploaded)})
Example #16
def invalidate_all_caches():
    """
    Basic entry-point for clearing necessary caches.  Most functions can
    call in here.
    """
    if caching_is_enabled():
        invalidate_inmemory_caches()
        invalidate_web_cache()
        logging.debug("Great success emptying all caches.")
def write_count_to_json(subtitle_counts, data_path):
    """Write JSON to file in static/data/subtitles/"""
    current_counts = softload_json(SUBTITLE_COUNTS_FILEPATH, logger=logging.error)
    current_counts.update(subtitle_counts)

    logging.debug("Writing fresh srt counts to %s" % SUBTITLE_COUNTS_FILEPATH)
    with open(SUBTITLE_COUNTS_FILEPATH, 'w') as fp:
        # sort here, so we don't have to sort later when serving to clients
        json.dump(current_counts, fp, sort_keys=True)
Example #18
def save_topic_tree(topic_tree=None, node_cache=None, data_path=os.path.join(settings.PROJECT_PATH, "static", "data")):
    assert bool(topic_tree) + bool(node_cache) == 1, "Must specify exactly one of the topic_tree and node_cache parameters"

    # Dump the topic tree (again)
    topic_tree = topic_tree or node_cache["Topic"]["root"][0]

    dest_filepath = os.path.join(data_path, topic_tools.topics_file)
    logging.debug("Saving topic tree to %s" % dest_filepath)
    with open(dest_filepath, "w") as fp:
        fp.write(json.dumps(topic_tree, indent=2))

    def test_get_exercise_load_status(self):
        for path in get_exercise_paths():
            logging.debug("Testing path : " + path)
            self.browser.get(self.live_server_url + path)
            error_list = self.browser.execute_script("return window.js_errors;")
            if error_list:
                logging.error("Found JS error(s) while loading path: " + path)
                for e in error_list:
                    logging.error(e)
            self.assertFalse(error_list)
Example #20
def get_file2id_map(force=False):
    global YT2ID_MAP
    if YT2ID_MAP is None or force:
        YT2ID_MAP = {}
        for lang_code, dic in get_dubbed_video_map().iteritems():
            for english_youtube_id, dubbed_youtube_id in dic.iteritems():
                if dubbed_youtube_id in YT2ID_MAP:
                    logging.debug("conflicting entry of dubbed_youtube_id %s in %s dubbed video map" % (dubbed_youtube_id, lang_code))
                YT2ID_MAP[dubbed_youtube_id] = english_youtube_id  # assumes video id is the english youtube_id
    return YT2ID_MAP
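
# The map inversion with conflict logging generalizes; a sketch over a fabricated
# dubbed-video map:
import logging

dubbed_map = {
    "es": {"EN_a": "ES_a", "EN_b": "ES_b"},
    "pt": {"EN_a": "PT_a", "EN_b": "ES_b"},  # deliberate collision on "ES_b"
}

inverted = {}
for lang_code, mapping in dubbed_map.items():
    for english_id, dubbed_id in mapping.items():
        if dubbed_id in inverted:
            logging.debug("conflicting entry for dubbed id %s in %s map" % (dubbed_id, lang_code))
        inverted[dubbed_id] = english_id

print(inverted["ES_a"])  # EN_a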
def get_exercise_page_paths(video_id=None):
    try:
        exercise_paths = set()
        for exercise in topic_tools.get_related_exercises(videos=topic_tools.get_node_cache("Video")[video_id]):
            exercise_paths = exercise_paths.union(set([exercise["path"]]))
        return list(exercise_paths)
    except Exception as e:
        logging.debug("Exception while getting exercise paths: %s" % e)
        return []

    def wait_until_starttime(starttime):
        time_to_sleep = (self.random.random() * 10.0) + 10
        if self.verbosity >= 1:
            print("(" + str(self.behavior_profile - 24601) + ") waiting until it's time to start (%.1fs)." % time_to_sleep)
        time.sleep(time_to_sleep)  # sleep before re-checking the clock
        now = datetime.datetime.today()
        # Compare (hour, minute) pairs so the check works across hour boundaries
        if (now.hour, now.minute) >= (int(starttime[:2]), int(starttime[-2:])):
            return False
        logging.debug("Go!")
        return True

    def create_signature_file(inner_zip_file):
        signature_file = os.path.splitext(inner_zip_file)[0] + "_signature.txt"
        logging.debug("Generating signature; saving to %s" % signature_file)
        if settings.DEBUG or not os.path.exists(signature_file):  # always regenerate in debug mode
            key = Device.get_own_device().get_key()
            chunk_size = int(2E5)  # 200kb chunks
            signature = key.sign_large_file(inner_zip_file, chunk_size=chunk_size)
            with open(signature_file, "w") as fp:
                fp.write("%d\n" % chunk_size)
                fp.write(signature)
        return signature_file
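
# sign_large_file and the Device key API are project-specific; the underlying
# chunked-read pattern, with a plain SHA-256 digest standing in for the signature
# (an analogy, not the project's crypto):
import hashlib

def digest_large_file(path, chunk_size=int(2E5)):
    """Hash a file in ~200kb chunks so large files never load fully into memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as fp:
        for chunk in iter(lambda: fp.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()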
def get_available_language_packs(request, version):
    """Return dict of available language packs"""

    # On central, loop through available language packs in static/language_packs/
    try:
        with open(get_language_pack_availability_filepath(version=version), "r") as fp:
            language_packs_available = json.load(fp)
    except Exception as e:
        logging.debug("Unexpected error getting available language packs: %s" % e)
        language_packs_available = {}
    return JsonResponse(sorted(language_packs_available.values(), key=lambda lp: lp["name"].lower()))

    def run(self):
        jobs = Job.objects.due()
        prof_string = "" if not self.do_profile else "[%8.2f MB] " % memory_profiler.memory_usage()[0]

        if jobs:
            logger.info("%sRunning %d due jobs... (%s)" % (prof_string, jobs.count(), ", ".join(['"%s"' % job.name for job in jobs])))
            call_command('cron')
        else:
            logger.debug("%sNo jobs due to run." % prof_string)

        if self.do_gc:
            gc.collect()

def set_request_language(request, lang_code):
    # Each request can set its language from the querystring, or fall back to
    # the language currently stored in the session.

    lang_code = select_best_available_language(lang_code)  # output is in django_lang format

    if lang_code != request.session.get(settings.LANGUAGE_COOKIE_NAME):
        logging.debug("setting request language to %s (session language %s), from %s" % (lang_code, request.session.get("default_language"), request.session.get(settings.LANGUAGE_COOKIE_NAME)))
        # Just in case we have a db-backed session, don't write unless we have to.
        request.session[settings.LANGUAGE_COOKIE_NAME] = lang_code

    request.language = lcode_to_ietf(lang_code)
    translation.activate(request.language)
Example #27
def invalidate_on_video_update(sender, **kwargs):
    """
    Listen in to see when videos become available.
    """
    # Can only do full check in Django 1.5+, but shouldn't matter--we should only save with
    # percent_complete == 100 once.
    just_now_available = kwargs["instance"] and kwargs["instance"].percent_complete == 100 #and "percent_complete" in kwargs["updated_fields"]
    if just_now_available:
        # This event should only happen once, so don't bother checking if
        #   this is the field that changed.
        logging.debug("Invalidating cache on save for %s" % kwargs["instance"])
        invalidate_all_caches()

def move_video_sizes_file(lang_code):
    lang_pack_location = os.path.join(LOCALE_ROOT, lang_code)
    filename = os.path.basename(REMOTE_VIDEO_SIZE_FILEPATH)
    src_path = os.path.join(lang_pack_location, filename)
    dest_path = REMOTE_VIDEO_SIZE_FILEPATH

    # replace the old remote_video_size json
    if not os.path.exists(src_path):
        logging.error("Could not find videos sizes file (%s)" % src_path)
    else:
        logging.debug('Moving %s to %s' % (src_path, dest_path))
        shutil.move(src_path, dest_path)
Example #29
def javascript_catalog_cached(request):
    global JS_CATALOG_CACHE
    lang = request.session['default_language']
    if lang in JS_CATALOG_CACHE:
        logging.debug('Using js translation catalog cache for %s' % lang)
        src = JS_CATALOG_CACHE[lang]
        return HttpResponse(src, 'text/javascript')
    else:
        logging.debug('Generating js translation catalog for %s' % lang)
        resp = javascript_catalog(request, 'djangojs', settings.INSTALLED_APPS)
        src = resp.content
        JS_CATALOG_CACHE[lang] = src
        return resp
Example #30
    def mark_as_completed(self, stage_status=None, notes=None):
        """
        Completes stage and process percents, stamps end time.
        """
        logging.debug("Completing process %s" % (self.process_name))

        self.stage_percent = 1.
        self.process_percent = 1.
        self.current_stage = self.total_stages
        self.end_time = datetime.datetime.now()
        self.stage_status = stage_status or self.stage_status  # don't change this to None by default, so that users can be aware of any faults.
        self.completed = True
        self.notes = notes
        self.save()