def validate_language_map(lang_codes):
    """Check that every given (or prepped) language code exists in the
    language map, and warn about any that are missing.

    All srt languages must exist in the language map; a missing language
    would cause errors partway through command running (which can be
    long). This up-front consistency check surfaces the problem early.

    Args:
        lang_codes: iterable of language codes to check, or a falsy value
            to check all prepped language codes.
    """
    lang_codes = lang_codes or get_all_prepped_lang_codes()

    # Collect every code the language map cannot resolve.
    missing_langs = []
    for lang_code in lang_codes:
        try:
            get_language_name(lcode_to_ietf(lang_code), error_on_missing=True)
        except LanguageNotFoundError:
            missing_langs.append(lang_code)

    if missing_langs:
        # logging.warn is a deprecated alias; logging.warning is the
        # supported spelling.
        logging.warning("Please add the following language codes to %s:\n\t%s" % (LANG_LOOKUP_FILEPATH, missing_langs))
# ---- Example #2 (scraped-snippet separator) ----
def get_new_counts(language_code, data_path=settings.SUBTITLES_DATA_ROOT, locale_root=LOCALE_ROOT):
    """Write a new dictionary of srt file counts in respective download folders.

    Counts the ``*.srt`` files under the language's subtitle directory,
    records the count (keyed by human-readable language name) via
    write_new_json, and returns that count.

    Args:
        language_code: code of the language whose subtitles are counted.
        data_path: directory the counts JSON is written to.
        locale_root: unused; kept for interface compatibility.

    Returns:
        The subtitle count for the language, or None when the count could
        not be determined.
    """
    language_subtitle_count = {}
    subtitles_path = get_srt_path(language_code)
    lang_name = get_language_name(language_code)

    try:
        count = len(glob.glob("%s/*.srt" % subtitles_path))
        logging.info("%4d subtitles for %-20s" % (count, lang_name))

        language_subtitle_count[lang_name] = {
            "count": count,
            "code": language_code,
        }
    except LanguageNameDoesNotExist as ldne:
        # logging.warn is deprecated; use logging.warning.
        logging.warning(ldne)
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        logging.info("%-4s subtitles for %-20s" % ("No", lang_name))

    write_new_json(language_subtitle_count, data_path)
    # .get on the outer dict too: the original raised KeyError here
    # whenever an exception path above skipped the insert.
    return language_subtitle_count.get(lang_name, {}).get("count")
# ---- Example #3 (scraped-snippet separator) ----
def get_new_counts(language_code,
                   data_path=settings.SUBTITLES_DATA_ROOT,
                   locale_root=LOCALE_ROOT):
    """Write a new dictionary of srt file counts in respective download folders.

    Counts the ``*.srt`` files under the language's subtitle directory,
    records the count (keyed by human-readable language name) via
    write_new_json, and returns that count.

    Args:
        language_code: code of the language whose subtitles are counted.
        data_path: directory the counts JSON is written to.
        locale_root: unused; kept for interface compatibility.

    Returns:
        The subtitle count for the language, or None when the count could
        not be determined.
    """
    language_subtitle_count = {}
    subtitles_path = get_srt_path(language_code)
    lang_name = get_language_name(language_code)

    try:
        count = len(glob.glob("%s/*.srt" % subtitles_path))
        logging.info("%4d subtitles for %-20s" % (count, lang_name))

        language_subtitle_count[lang_name] = {
            "count": count,
            "code": language_code,
        }
    except LanguageNameDoesNotExist as ldne:
        # logging.warn is deprecated; use logging.warning.
        logging.warning(ldne)
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        logging.info("%-4s subtitles for %-20s" % ("No", lang_name))

    write_new_json(language_subtitle_count, data_path)
    # .get on the outer dict too: the original raised KeyError here
    # whenever an exception path above skipped the insert.
    return language_subtitle_count.get(lang_name, {}).get("count")
def store_new_counts(lang_code, data_path=SUBTITLES_DATA_ROOT, locale_root=LOCALE_ROOT):
    """Write a new dictionary of srt file counts in respective download folders.

    Counts the ``*.srt`` files under the language's subtitle directory and
    persists the result (keyed by human-readable language name) via
    write_count_to_json.

    Args:
        lang_code: code of the language whose subtitles are counted.
        data_path: directory the counts JSON is written to.
        locale_root: unused; kept for interface compatibility.

    Returns:
        The subtitle count for the language (0 when it could not be
        determined).
    """
    language_subtitle_count = {}
    subtitles_path = get_srt_path(lang_code)
    lang_name = get_language_name(lang_code)

    try:
        count = len(glob.glob("%s/*.srt" % subtitles_path))

        language_subtitle_count[lang_name] = {
            "count": count,
            "code": lang_code,
        }
    except LanguageNameDoesNotExist as ldne:
        count = 0
        logging.debug(ldne)
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        count = 0
        logging.info("%-4s subtitles for %-20s" % ("No", lang_name))

    # Always write to disk, even when the count defaulted to 0.
    write_count_to_json(language_subtitle_count, data_path)

    return count
def generate_metadata(lang_codes=None, broken_langs=None, added_ka=False):
    """Loop through locale folder, create or update language specific meta
    and create or update master file, skipping broken languages

    note: broken_langs must be in django format.

    Args:
        lang_codes: language codes to process; defaults to every entry in
            LOCALE_ROOT.
        broken_langs: django-format codes to skip (e.g. languages that
            failed compilemessages); defaults to an empty tuple.
        added_ka: when truthy, each written language_pack_version is
            bumped by one extra (bool is coerced via int()).
    """
    logging.info("Generating new language pack metadata")

    if broken_langs is None:
        broken_langs = tuple()

    lang_codes = lang_codes or os.listdir(LOCALE_ROOT)
    try:
        with open(get_language_pack_availability_filepath(), "r") as fp:
            master_metadata = json.load(fp)
        # Legacy format migration: the master file used to be a list of
        # per-language dicts; convert it to a dict keyed by language code.
        if isinstance(master_metadata, list):
            logging.info("Code switched from list to dict to support single language LanguagePack updates; converting your old list storage for dictionary storage.")
            master_list = master_metadata
            master_metadata = {}
            for lang_meta in master_list:
                master_metadata[lang_meta["code"]] = lang_meta
    except Exception as e:
        # Missing/corrupt master file is non-fatal: start from scratch.
        logging.warn("Error opening language pack metadata: %s; resetting" % e)
        master_metadata = {}

    # loop through all languages in locale, update master file
    crowdin_meta_dict = download_crowdin_metadata()
    with open(SUBTITLE_COUNTS_FILEPATH, "r") as fp:
        subtitle_counts = json.load(fp)

    for lc in lang_codes:
        # Each code is needed in both django-dir and IETF forms.
        lang_code_django = lcode_to_django_dir(lc)
        lang_code_ietf = lcode_to_ietf(lc)
        lang_name = get_language_name(lang_code_ietf)

        # skips anything not a directory, or with errors
        if not os.path.isdir(os.path.join(LOCALE_ROOT, lang_code_django)):
            logging.info("Skipping item %s because it is not a directory" % lang_code_django)
            continue
        elif lang_code_django in broken_langs:  # broken_langs is django format
            logging.info("Skipping directory %s because it triggered an error during compilemessages. The admins should have received a report about this and must fix it before this pack will be updateed." % lang_code_django)
            continue

        # Gather existing metadata
        # crowdin entries are matched by IETF code; default to {} when absent.
        crowdin_meta = next((meta for meta in crowdin_meta_dict if meta["code"] == lang_code_ietf), {})
        metadata_filepath = get_language_pack_metadata_filepath(lang_code_ietf)
        try:
            with open(metadata_filepath) as fp:
                local_meta = json.load(fp)
        except Exception as e:
            # Per-language metadata is rebuilt from scratch when unreadable.
            logging.warn("Error opening language pack metadata (%s): %s; resetting" % (metadata_filepath, e))
            local_meta = {}

        try:
            # update metadata
            updated_meta = {
                "code": lcode_to_ietf(crowdin_meta.get("code") or lang_code_django),  # user-facing code
                "name": (crowdin_meta.get("name") or lang_name),
                "percent_translated": int(crowdin_meta.get("approved_progress", 0)),
                "phrases": int(crowdin_meta.get("phrases", 0)),
                "approved_translations": int(crowdin_meta.get("approved", 0)),
            }

            # Obtain current number of subtitles
            # subtitle_counts is keyed by human-readable language name.
            entry = subtitle_counts.get(lang_name, {})
            srt_count = entry.get("count", 0)

            updated_meta.update({
                "software_version": version.VERSION,
                "subtitle_count": srt_count,
            })

        except LanguageNotFoundError:
            logging.error("Unrecognized language; must skip item %s" % lang_code_django)
            continue

        # Version bump is computed from the old vs. new metadata; added_ka
        # adds one more on top.
        language_pack_version = increment_language_pack_version(local_meta, updated_meta)
        updated_meta["language_pack_version"] = language_pack_version + int(added_ka)
        local_meta.update(updated_meta)

        # Write locally (this is used on download by distributed server to update it's database)
        with open(metadata_filepath, 'w') as output:
            json.dump(local_meta, output)

        # Update master (this is used for central server to handle API requests for data)
        master_metadata[lang_code_ietf] = local_meta

    # Save updated master
    ensure_dir(os.path.dirname(get_language_pack_availability_filepath()))
    with open(get_language_pack_availability_filepath(), 'w') as output:
        json.dump(master_metadata, output)
    logging.info("Local record of translations updated")
# ---- Example #6 (scraped-snippet separator) ----
def generate_metadata(lang_codes=None, broken_langs=None):
    """Loop through locale folder, create or update language specific meta
    and create or update master file, skipping broken languages.

    Args:
        lang_codes: language codes to process; defaults to every entry in
            LOCALE_ROOT (previously this parameter was accepted but
            silently ignored).
        broken_langs: codes to skip because compilemessages failed;
            defaults to an empty tuple (previously a default call crashed
            with TypeError on ``lang in None``).
    """
    logging.info("Generating new po file metadata")
    master_file = []

    if broken_langs is None:
        broken_langs = tuple()
    # Honor lang_codes when given, matching the other generate_metadata
    # variant; default behavior (all of LOCALE_ROOT) is unchanged.
    lang_codes = lang_codes or os.listdir(LOCALE_ROOT)

    # loop through all languages in locale, update master file
    crowdin_meta_dict = get_crowdin_meta()
    with open(settings.SUBTITLES_DATA_ROOT + "subtitle_counts.json") as fp:
        subtitle_counts = json.load(fp)
    for lang in lang_codes:

        # skips anything not a directory
        if not os.path.isdir(os.path.join(LOCALE_ROOT, lang)):
            logging.info("Skipping %s because it is not a directory" % lang)
            continue
        elif lang in broken_langs:
            logging.info("Skipping %s because it triggered an error during compilemessages. The admins should have received a report about this and must fix it before this pack will be updateed." % lang)
            continue

        # crowdin entries are matched by crowdin-format code; default {}.
        crowdin_meta = next((meta for meta in crowdin_meta_dict if meta["code"] == convert_language_code_format(lang_code=lang, for_crowdin=True)), {})
        try:
            with open(os.path.join(LOCALE_ROOT, lang, "%s_metadata.json" % lang)) as fp:
                local_meta = json.load(fp)
        except Exception:
            # Missing/corrupt per-language metadata: rebuild from scratch.
            local_meta = {}

        try:
            # update metadata
            updated_meta = {
                "code": crowdin_meta.get("code") or convert_language_code_format(lang),
                "name": crowdin_meta.get("name") or get_language_name(convert_language_code_format(lang)),
                "percent_translated": int(crowdin_meta.get("approved_progress", 0)),
                "phrases": int(crowdin_meta.get("phrases", 0)),
                "approved_translations": int(crowdin_meta.get("approved", 0)),
            }

            # Obtain current number of subtitles
            # subtitle_counts is keyed by human-readable language name.
            entry = subtitle_counts.get(get_language_name(lang), {})
            srt_count = entry.get("count", 0)

            updated_meta.update({
                "software_version": version.VERSION,
                "subtitle_count": srt_count,
            })

        except LanguageNotFoundError:
            logging.error("Unrecognized language; must skip: %s" % lang)
            continue

        language_pack_version = increment_language_pack_version(local_meta, updated_meta)
        updated_meta["language_pack_version"] = language_pack_version
        local_meta.update(updated_meta)

        # Write locally (this is used on download by distributed server to update it's database)
        with open(os.path.join(LOCALE_ROOT, lang, "%s_metadata.json" % lang), 'w') as output:
            json.dump(local_meta, output)

        # Update master (this is used for central server to handle API requests for data)
        master_file.append(local_meta)

    # Save updated master
    ensure_dir(settings.LANGUAGE_PACK_ROOT)
    with open(os.path.join(settings.LANGUAGE_PACK_ROOT, LANGUAGE_PACK_AVAILABILITY_FILENAME), 'w') as output:
        json.dump(master_file, output)
    logging.info("Local record of translations updated")