Example #1
    def handle(self, *args, **options):
        if settings.CENTRAL_SERVER:
            raise CommandError(
                "This must only be run on distributed servers.")

        lang_code = lcode_to_ietf(options["lang_code"])
        lang_name = get_language_name(lang_code)
        software_version = options["software_version"]
        logging.info(
            "Downloading language pack for lang_name=%s, software_version=%s" %
            (lang_name, software_version))

        # Download the language pack
        try:
            if options['file']:
                self.start(
                    _("Using local language pack '%(filepath)s'") %
                    {"filepath": options['file']})
                zip_filepath = options['file']
            else:
                self.start(
                    _("Downloading language pack '%(lang_code)s'") %
                    {"lang_code": lang_code})
                zip_filepath = get_language_pack(lang_code,
                                                 software_version,
                                                 callback=self.cb)

            # Unpack into locale directory
            self.next_stage(
                _("Unpacking language pack '%(lang_code)s'") %
                {"lang_code": lang_code})
            unpack_language(lang_code, zip_filepath=zip_filepath)

            # Create static i18n files (the jsi18n catalog) for this language pack
            self.next_stage(
                _("Creating static files for language pack '%(lang_code)s'") %
                {"lang_code": lang_code})
            update_jsi18n_file(lang_code)

            self.next_stage(
                _("Moving files to their appropriate local disk locations."))
            move_dubbed_video_map(lang_code)
            move_exercises(lang_code)
            move_srts(lang_code)
            move_video_sizes_file(lang_code)

            self.next_stage()
            call_command("collectstatic", interactive=False)

            self.next_stage(_("Invalidate caches"))
            caching.invalidate_all_caches()

            self.complete(
                _("Finished processing language pack %(lang_name)s.") %
                {"lang_name": get_language_name(lang_code)})
        except Exception as e:
            self.cancel(stage_status="error",
                        notes=_("Error: %(error_msg)s") %
                        {"error_msg": unicode(e)})
            raise
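
The command above runs a fixed pipeline: download the pack (or reuse a local zip), unpack it into the locale directory, regenerate static i18n files, move the dubbed-video map, exercises, srts and video-sizes file into place, run collectstatic, and invalidate caches. Below is a minimal sketch of driving it programmatically; the command name "languagepackdownload" and the concrete option values are assumptions for illustration, only the option keys (lang_code, software_version, file) come from the code above.

from django.core.management import call_command

# Hypothetical invocation of the management command shown above; the command
# name and the concrete values are illustrative assumptions.
call_command(
    "languagepackdownload",
    lang_code="pt-BR",          # normalized via lcode_to_ietf() inside handle()
    software_version="0.13",    # which language-pack build to fetch
)
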
Example #2
    def __init__(self, facility, admin_access=False, *args, **kwargs):
        super(FacilityUserForm, self).__init__(*args, **kwargs)

        self.admin_access = admin_access
        self.fields["default_language"].choices = [
            (lang_code, get_language_name(lang_code))
            for lang_code in get_installed_language_packs()
        ]

        # Select the initial default language,
        #   but only if we're not in the process of updating it to something else.
        if not self.fields[
                "default_language"].initial and "default_language" not in self.changed_data:
            self.fields["default_language"].initial = (
                self.instance
                and self.instance.default_language) or get_default_language()

        # Passwords only required on new, not on edit
        self.fields["password_first"].required = self.instance.pk == ""
        self.fields["password_recheck"].required = self.instance.pk == ""

        # Across POST and GET requests
        self.fields["zone_fallback"].initial = facility.get_zone()
        self.fields["facility"].initial = facility
        self.fields["facility"].queryset = Facility.objects.by_zone(
            facility.get_zone())
        self.fields["group"].queryset = FacilityGroup.objects.filter(
            facility=facility)
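
The constructor scopes the form to one facility: language choices come from the installed language packs, the facility and group querysets are restricted to the facility's zone, and the password fields are only required when the instance has no primary key yet (a new user). A hedged sketch of using it from a "create user" view follows; everything beyond the constructor signature shown above is an assumption.

# Hypothetical view-side usage of FacilityUserForm; only the constructor
# signature is taken from the example above.
def add_facility_user(request, facility):
    form = FacilityUserForm(
        facility,
        admin_access=False,
        data=request.POST if request.method == "POST" else None,
    )
    if form.is_bound and form.is_valid():
        form.save()  # passwords were required because this is a new user
    return form
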
Example #3
def store_new_counts(lang_code,
                     data_path=SUBTITLES_DATA_ROOT,
                     locale_root=LOCALE_ROOT):
    """Write a new dictionary of srt file counts in respective download folders"""
    language_subtitle_count = {}
    subtitles_path = get_srt_path(lang_code)
    lang_name = get_language_name(lang_code)

    try:
        count = len(glob.glob("%s/*.srt" % subtitles_path))

        language_subtitle_count[lang_name] = {}
        language_subtitle_count[lang_name]["count"] = count
        language_subtitle_count[lang_name]["code"] = lang_code
    except LanguageNameDoesNotExist as ldne:
        count = 0
        logging.debug(ldne)
    except:
        count = 0
        logging.info("%-4s subtitles for %-20s" % ("No", lang_name))

    # Always write to disk.
    write_count_to_json(language_subtitle_count, data_path)

    return count
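
The try block above fills language_subtitle_count keyed by the human-readable language name, then write_count_to_json persists it. The dictionary has roughly the shape below; the concrete values are illustrative.

# Approximate shape of the dictionary written by write_count_to_json();
# the values are made up for illustration.
language_subtitle_count = {
    "Portuguese, Brazil": {   # get_language_name(lang_code)
        "count": 1234,        # number of *.srt files under get_srt_path(lang_code)
        "code": "pt-BR",      # the original lang_code argument
    },
}
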
Example #4
def validate_language_map(lang_codes):
    """
    This function will tell you any blockers that you'll hit while
    running this command.

    All srt languages must exist in the language map; missing languages
    will cause errors during command running (which can be long).
    This function avoids that problem by doing the above consistency check.
    """
    lang_codes = lang_codes or get_all_prepped_lang_codes()
    missing_langs = []
    for lang_code in lang_codes:
        try:
            get_language_name(lcode_to_ietf(lang_code), error_on_missing=True)
        except LanguageNotFoundError:
            missing_langs.append(lang_code)

    if missing_langs:
        logging.warn("Please add the following language codes to %s:\n\t%s" % (
            settings.LANG_LOOKUP_FILEPATH, missing_langs,
        ))
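
validate_language_map is a pre-flight consistency check: it resolves every candidate code through get_language_name(error_on_missing=True) and warns about any codes missing from the lookup map before the long-running command starts. A minimal sketch of calling it; "xx-qq" is a made-up code standing in for one that is missing.

# Hypothetical call; logs a warning listing codes to add to settings.LANG_LOOKUP_FILEPATH.
validate_language_map(["pt-BR", "xx-qq"])
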
Example #5
def delete_language_pack(request):
    """
    API endpoint for deleting language pack which fetches the language code (in delete_id) which has to be deleted.
    That particular language folders are deleted and that language gets removed.
    """
    lang_code = simplejson.loads(request.body or "{}").get("lang")
    delete_language(lang_code)

    return JsonResponse({
        "success":
        _("Successfully deleted language pack for %(lang_name)s.") % {
            "lang_name": get_language_name(lang_code)
        }
    })
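
The endpoint reads the target code from the JSON request body, so a client only needs to POST a body of the form {"lang": "<code>"}. A hedged client-side sketch using Django's test client; the URL is an assumption, only the body shape comes from the code above.

import json
from django.test import Client

# Hypothetical URL; the view itself only cares about the "lang" key in the body.
response = Client().post(
    "/api/i18n/delete_language_pack/",
    data=json.dumps({"lang": "pt-BR"}),
    content_type="application/json",
)
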
Example #6
def start_languagepack_download(request):
    if not request.method == 'POST':
        raise Exception(_("Must call API endpoint with POST verb."))

    # Django post-processes form data into request.POST, so read the raw request body instead.
    data = json.loads(request.raw_post_data)
    lang_code = lcode_to_ietf(data['lang'])

    force_job('languagepackdownload',
              _("Language pack download"),
              lang_code=lang_code,
              locale=request.language)

    return JsonResponseMessageSuccess(
        _("Successfully started language pack download for %(lang_name)s.") %
        {"lang_name": get_language_name(lang_code)})
Example #7
File: forms.py Project: 2flcastro/ka-lite
    def __init__(self, facility, *args, **kwargs):
        super(FacilityUserForm, self).__init__(*args, **kwargs)

        self.fields["default_language"].choices = [(lang_code, get_language_name(lang_code)) for lang_code in get_installed_language_packs()]

        # Select the initial default language,
        #   but only if we're not in the process of updating it to something else.
        if not self.fields["default_language"].initial and "default_language" not in self.changed_data:
            self.fields["default_language"].initial = (self.instance and self.instance.default_language) or get_default_language()

        # Passwords only required on new, not on edit
        self.fields["password_first"].required = self.instance.pk == ""
        self.fields["password_recheck"].required = self.instance.pk == ""

        # Across POST and GET requests
        self.fields["zone_fallback"].initial = facility.get_zone()
        self.fields["facility"].initial = facility
        self.fields["facility"].queryset = Facility.objects.by_zone(facility.get_zone())
        self.fields["group"].queryset = FacilityGroup.objects.filter(facility=facility)
Example #8
    def handle(self, *args, **options):
        if settings.CENTRAL_SERVER:
            raise CommandError("This must only be run on distributed servers server.")

        lang_code = lcode_to_ietf(options["lang_code"])
        lang_name = get_language_name(lang_code)
        software_version = options["software_version"]
        logging.info("Downloading language pack for lang_name=%s, software_version=%s" % (lang_name, software_version))

        # Download the language pack
        try:
            if options['file']:
                self.start(_("Using local language pack '%(filepath)s'") % {"filepath": options['file']})
                zip_filepath = options['file']
            else:
                self.start(_("Downloading language pack '%(lang_code)s'") % {"lang_code": lang_code})
                zip_filepath = get_language_pack(lang_code, software_version, callback=self.cb)

            # Unpack into locale directory
            self.next_stage(_("Unpacking language pack '%(lang_code)s'") % {"lang_code": lang_code})
            unpack_language(lang_code, zip_filepath=zip_filepath)

            # Create static i18n files (the jsi18n catalog) for this language pack
            self.next_stage(_("Creating static files for language pack '%(lang_code)s'") % {"lang_code": lang_code})
            update_jsi18n_file(lang_code)


            self.next_stage(_("Moving files to their appropriate local disk locations."))
            move_dubbed_video_map(lang_code)
            move_exercises(lang_code)
            move_srts(lang_code)
            move_video_sizes_file(lang_code)

            self.next_stage()
            call_command("collectstatic", interactive=False)

            self.next_stage(_("Invalidate caches"))
            caching.invalidate_all_caches()

            self.complete(_("Finished processing language pack %(lang_name)s.") % {"lang_name": get_language_name(lang_code)})
        except Exception as e:
            self.cancel(stage_status="error", notes=_("Error: %(error_msg)s") % {"error_msg": unicode(e)})
            raise
Example #9
def store_new_counts(lang_code, data_path=SUBTITLES_DATA_ROOT, locale_root=LOCALE_ROOT):
    """Write a new dictionary of srt file counts in respective download folders"""
    language_subtitle_count = {}
    subtitles_path = get_srt_path(lang_code)
    lang_name = get_language_name(lang_code)

    try:
        count = len(glob.glob("%s/*.srt" % subtitles_path))

        language_subtitle_count[lang_name] = {}
        language_subtitle_count[lang_name]["count"] = count
        language_subtitle_count[lang_name]["code"] = lang_code
    except LanguageNameDoesNotExist as ldne:
        count = 0
        logging.debug(ldne)
    except:
        count = 0
        logging.info("%-4s subtitles for %-20s" % ("No", lang_name))

    # Always write to disk.
    write_count_to_json(language_subtitle_count, data_path)

    return count
Example #10
File: api_views.py Project: SG345/ka-lite
def delete_language_pack(request):
    """
    API endpoint for deleting language pack which fetches the language code (in delete_id) which has to be deleted.
    That particular language folders are deleted and that language gets removed.
    """
    lang_code = simplejson.loads(request.body or "{}").get("lang")
    delete_language(lang_code)

    return JsonResponse({"success": _("Successfully deleted language pack for %(lang_name)s.") % {"lang_name": get_language_name(lang_code)}})
Example #11
File: api_views.py Project: SG345/ka-lite
def start_languagepack_download(request):
    if not request.method == 'POST':
        raise Exception(_("Must call API endpoint with POST verb."))

    # Django post-processes form data into request.POST, so read the raw request body instead.
    data = json.loads(request.raw_post_data)
    lang_code = lcode_to_ietf(data['lang'])

    force_job('languagepackdownload', _("Language pack download"), lang_code=lang_code, locale=request.language)

    return JsonResponseMessageSuccess(_("Successfully started language pack download for %(lang_name)s.") % {"lang_name": get_language_name(lang_code)})
Example #12
def get_content_cache(force=False, annotate=False, language=None):

    if not language:
        language = django_settings.LANGUAGE_CODE

    global CONTENT

    if CONTENT is None:
        CONTENT = {}

    if CONTENT.get(language) is None:
        content = None
        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP and not force:
            content = softload_sqlite_cache(settings.CONTENT_CACHE_FILEPATH)
        if content:
            CONTENT[language] = content
            return CONTENT[language]
        else:
            if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP:
                call_command("create_content_db")
                content = softload_sqlite_cache(settings.CONTENT_CACHE_FILEPATH)
            else:
                content = softload_json(settings.CONTENT_FILEPATH, logger=logging.debug, raises=False)
            CONTENT[language] = content
            annotate = True

    if annotate:

        # Loop through all content items and put thumbnail urls, content urls,
        # and subtitle urls on the content dictionary, and list all languages
        # that the content is available in.
        try:
            contents_folder = os.listdir(django_settings.CONTENT_ROOT)
        except OSError:
            contents_folder = []

        subtitle_langs = {}

        if os.path.exists(i18n.get_srt_path()):
            for (dirpath, dirnames, filenames) in os.walk(i18n.get_srt_path()):
                # Only bother looking at files that are inside a 'subtitles' directory
                if os.path.basename(dirpath) == "subtitles":
                    lc = os.path.basename(os.path.dirname(dirpath))
                    for filename in filenames:
                        if filename in subtitle_langs:
                            subtitle_langs[filename].append(lc)
                        else:
                            subtitle_langs[filename] = [lc]

        for key, content in CONTENT[language].iteritems():
            default_thumbnail = create_thumbnail_url(content.get("id"))
            dubmap = i18n.get_id2oklang_map(content.get("id"))
            if dubmap:
                content_lang = i18n.select_best_available_language(language, available_codes=dubmap.keys()) or ""
                if content_lang:
                    dubbed_id = dubmap.get(content_lang)
                    format = content.get("format", "")
                    if (dubbed_id + "." + format) in contents_folder:
                        content["available"] = True
                        thumbnail = create_thumbnail_url(dubbed_id) or default_thumbnail
                        content["content_urls"] = {
                            "stream": django_settings.CONTENT_URL + dubmap.get(content_lang) + "." + format,
                            "stream_type": "{kind}/{format}".format(kind=content.get("kind", "").lower(), format=format),
                            "thumbnail": thumbnail,
                        }
                    elif django_settings.BACKUP_VIDEO_SOURCE:
                        content["available"] = True
                        content["content_urls"] = {
                            "stream": django_settings.BACKUP_VIDEO_SOURCE.format(youtube_id=dubbed_id, video_format=format),
                            "stream_type": "{kind}/{format}".format(kind=content.get("kind", "").lower(), format=format),
                            "thumbnail": django_settings.BACKUP_VIDEO_SOURCE.format(youtube_id=dubbed_id, video_format="png"),
                        }
                    else:
                        content["available"] = False
                else:
                    content["available"] = False
            else:
                content["available"] = False

            # Get list of subtitle language codes currently available
            subtitle_lang_codes = subtitle_langs.get("{id}.srt".format(id=content.get("id")), [])

            # Generate subtitle URLs for any subtitles that do exist for this content item
            subtitle_urls = [{
                "code": lc,
                "url": django_settings.STATIC_URL + "srt/{code}/subtitles/{id}.srt".format(code=lc, id=content.get("id")),
                "name": i18n.get_language_name(lc)
                } for lc in subtitle_lang_codes]

            # Sort all subtitle URLs by language code
            content["subtitle_urls"] = sorted(subtitle_urls, key=lambda x: x.get("code", ""))

            with i18n.translate_block(language):
                content["selected_language"] = content_lang
                content["title"] = _(content["title"])
                content["description"] = _(content.get("description")) if content.get("description") else ""

            CONTENT[language][key] = content

        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP:
            try:
                CONTENT[language].commit()
            except IOError as e:
                logging.warn("Annotated content cache file failed in saving with error {e}".format(e=e))

    return CONTENT[language]
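
get_content_cache lazily builds a per-language dictionary of content items and, when annotating, fills in availability, content_urls, subtitle_urls and translated titles for each entry. A hedged sketch of reading the annotated cache; the lookup key is made up, only the field names come from the code above.

# Hypothetical read of the annotated cache; "some-content-id" is an illustrative key.
content = get_content_cache(language="pt-BR", annotate=True)
item = content.get("some-content-id", {})
if item.get("available"):
    print(item["content_urls"]["stream"])
print([s["code"] for s in item.get("subtitle_urls", [])])
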
Example #13
def get_content_cache(force=False, annotate=False, language=settings.LANGUAGE_CODE):
    global CONTENT, CONTENT_FILEPATH

    if CONTENT is None:
        CONTENT = {}
    if CONTENT.get(language) is None:
        CONTENT[language] = softload_json(CONTENT_FILEPATH, logger=logging.debug, raises=False)
        annotate = True

    if annotate:
        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP and not force:
            content = softload_json(CONTENT_FILEPATH + "_" + language + ".cache", logger=logging.debug, raises=False)
            if content:
                CONTENT[language] = content
                return CONTENT[language]

        # Loop through all content items and put thumbnail urls, content urls,
        # and subtitle urls on the content dictionary, and list all languages
        # that the content is available in.
        for content in CONTENT[language].values():
            default_thumbnail = create_thumbnail_url(content.get("id"))
            dubmap = i18n.get_id2oklang_map(content.get("id"))
            if dubmap:
                content_lang = i18n.select_best_available_language(language, available_codes=dubmap.keys()) or ""
                if content_lang:
                    dubbed_id = dubmap.get(content_lang)
                    format = content.get("format", "")
                    if is_content_on_disk(dubbed_id, format):
                        content["available"] = True
                        thumbnail = create_thumbnail_url(dubbed_id) or default_thumbnail
                        content["content_urls"] = {
                            "stream": settings.CONTENT_URL + dubmap.get(content_lang) + "." + format,
                            "stream_type": "{kind}/{format}".format(kind=content.get("kind", "").lower(), format=format),
                            "thumbnail": thumbnail,
                        }
                    else:
                        content["available"] = False
                else:
                    content["available"] = False
            else:
                content["available"] = False

            # Get list of subtitle language codes currently available
            subtitle_lang_codes = [] if not os.path.exists(i18n.get_srt_path()) else [lc for lc in os.listdir(i18n.get_srt_path()) if os.path.exists(i18n.get_srt_path(lc, content.get("id")))]

            # Generate subtitle URLs for any subtitles that do exist for this content item
            subtitle_urls = [{
                "code": lc,
                "url": settings.STATIC_URL + "srt/{code}/subtitles/{id}.srt".format(code=lc, id=content.get("id")),
                "name": i18n.get_language_name(lc)
                } for lc in subtitle_lang_codes if os.path.exists(i18n.get_srt_path(lc, content.get("id")))]

            # Sort all subtitle URLs by language code
            content["subtitle_urls"] = sorted(subtitle_urls, key=lambda x: x.get("code", ""))

            with i18n.translate_block(content_lang):
                content["selected_language"] = content_lang
                content["title"] = _(content["title"])
                content["description"] = _(content.get("description", "")) if content.get("description") else ""

        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP:
            try:
                with open(CONTENT_FILEPATH + "_" + language + ".cache", "w") as f:
                    json.dump(CONTENT[language], f)
            except IOError as e:
                logging.warn("Annotated content cache file failed in saving with error {e}".format(e=e))

    return CONTENT[language]
Example #14
def get_content_cache(force=False, annotate=False, language=settings.LANGUAGE_CODE):
    global CONTENT, CONTENT_FILEPATH

    if CONTENT is None:
        CONTENT = {}
    if CONTENT.get(language) is None:
        CONTENT[language] = softload_json(CONTENT_FILEPATH, logger=logging.debug, raises=False)
        annotate = True

    if annotate:
        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP and not force:
            content = softload_json(CONTENT_FILEPATH + "_" + language + ".cache", logger=logging.debug, raises=False)
            if content:
                CONTENT[language] = content
                return CONTENT[language]

        # Loop through all content items and put thumbnail urls, content urls,
        # and subtitle urls on the content dictionary, and list all languages
        # that the content is available in.
        for content in CONTENT[language].values():
            default_thumbnail = create_thumbnail_url(content.get("id"))
            dubmap = i18n.get_id2oklang_map(content.get("id"))
            if dubmap:
                content_lang = i18n.select_best_available_language(language, available_codes=dubmap.keys()) or ""
                if content_lang:
                    dubbed_id = dubmap.get(content_lang)
                    format = content.get("format", "")
                    if is_content_on_disk(dubbed_id, format):
                        content["available"] = True
                        thumbnail = create_thumbnail_url(dubbed_id) or default_thumbnail
                        content["content_urls"] = {
                            "stream": settings.CONTENT_URL + dubmap.get(content_lang) + "." + format,
                            "stream_type": "{kind}/{format}".format(kind=content.get("kind", "").lower(), format=format),
                            "thumbnail": thumbnail,
                        }
                    elif settings.BACKUP_VIDEO_SOURCE:
                        content["available"] = True
                        content["content_urls"] = {
                            "stream": settings.BACKUP_VIDEO_SOURCE.format(youtube_id=dubbed_id, video_format=format),
                            "stream_type": "{kind}/{format}".format(kind=content.get("kind", "").lower(), format=format),
                            "thumbnail": settings.BACKUP_VIDEO_SOURCE.format(youtube_id=dubbed_id, video_format="png"),
                        }
                    else:
                        content["available"] = False
                else:
                    content["available"] = False
            else:
                content["available"] = False

            # Get list of subtitle language codes currently available
            subtitle_lang_codes = [] if not os.path.exists(i18n.get_srt_path()) else [lc for lc in os.listdir(i18n.get_srt_path()) if os.path.exists(i18n.get_srt_path(lc, content.get("id")))]

            # Generate subtitle URLs for any subtitles that do exist for this content item
            subtitle_urls = [{
                "code": lc,
                "url": settings.STATIC_URL + "srt/{code}/subtitles/{id}.srt".format(code=lc, id=content.get("id")),
                "name": i18n.get_language_name(lc)
                } for lc in subtitle_lang_codes if os.path.exists(i18n.get_srt_path(lc, content.get("id")))]

            # Sort all subtitle URLs by language code
            content["subtitle_urls"] = sorted(subtitle_urls, key=lambda x: x.get("code", ""))

            with i18n.translate_block(content_lang):
                content["selected_language"] = content_lang
                content["title"] = _(content["title"])
                content["description"] = _(content.get("description", "")) if content.get("description") else ""

        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP:
            try:
                with open(CONTENT_FILEPATH + "_" + language + ".cache", "w") as f:
                    json.dump(CONTENT[language], f)
            except IOError as e:
                logging.warn("Annotated content cache file failed in saving with error {e}".format(e=e))

    return CONTENT[language]
Example #15
def get_content_cache(force=False, annotate=False):
    global CONTENT, CONTENT_FILEPATH

    if CONTENT is None:
        CONTENT = softload_json(CONTENT_FILEPATH,
                                logger=logging.debug,
                                raises=False)
        annotate = True

    if annotate:
        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP and not force:
            content = softload_json(CONTENT_FILEPATH + ".cache",
                                    logger=logging.debug,
                                    raises=False)
            if content:
                CONTENT = content
                return CONTENT

        # Loop through all content items and put thumbnail urls, content urls,
        # and subtitle urls on the content dictionary, and list all languages
        # that the content is available in.
        for content in CONTENT.values():
            languages = []
            default_thumbnail = create_thumbnail_url(content.get("id"))
            dubmap = i18n.get_id2oklang_map(content.get("id"))
            for lang, dubbed_id in dubmap.items():
                format = content.get("format", "")
                if is_content_on_disk(dubbed_id, format):
                    languages.append(lang)
                    thumbnail = create_thumbnail_url(
                        dubbed_id) or default_thumbnail
                    content["lang_data_" + lang] = {
                        "stream":
                        settings.CONTENT_URL + dubmap.get(lang) + "." + format,
                        "stream_type":
                        "{kind}/{format}".format(kind=content.get("kind",
                                                                  "").lower(),
                                                 format=format),
                        "thumbnail":
                        thumbnail,
                    }
            content["languages"] = languages

            # Get list of subtitle language codes currently available
            subtitle_lang_codes = [] if not os.path.exists(
                i18n.get_srt_path()) else [
                    lc for lc in os.listdir(i18n.get_srt_path())
                    if os.path.exists(i18n.get_srt_path(lc, content.get("id")))
                ]

            # Generate subtitle URLs for any subtitles that do exist for this content item
            subtitle_urls = [
                {
                    "code":
                    lc,
                    "url":
                    settings.STATIC_URL +
                    "srt/{code}/subtitles/{id}.srt".format(
                        code=lc, id=content.get("id")),
                    "name":
                    i18n.get_language_name(lc)
                } for lc in subtitle_lang_codes
                if os.path.exists(i18n.get_srt_path(lc, content.get("id")))
            ]

            # Sort all subtitle URLs by language code
            content["subtitle_urls"] = sorted(subtitle_urls,
                                              key=lambda x: x.get("code", ""))

        if settings.DO_NOT_RELOAD_CONTENT_CACHE_AT_STARTUP:
            try:
                with open(CONTENT_FILEPATH + ".cache", "w") as f:
                    json.dump(CONTENT, f)
            except IOError as e:
                logging.warn(
                    "Failed to save annotated content cache file: {e}".format(e=e))

    return CONTENT