def get_alt_src_langs(request, user, translation_project):
    """Return alternative source languages for *user* in *translation_project*.

    Prefers the user's configured alternative source languages; when the
    user has none configured at all, falls back to languages advertised
    in the browser's Accept-Language header that have a translation
    project for the same project.
    """
    tp_language = translation_project.language
    project = translation_project.project
    src_language = project.source_language
    langs = (user.alt_src_langs
             .exclude(id__in=(tp_language.id, src_language.id))
             .filter(translationproject__project=project))
    if user.alt_src_langs.count():
        # The user has explicit preferences; use them even when the
        # project-filtered set turns out to be empty.
        return langs
    from pootle_language.models import Language
    # Codes that never make sense as an alternative source here.
    reserved = ('en', 'en_US', src_language.code, tp_language.code)
    header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    for accept_lang, _priority in parse_accept_lang_header(header):
        if accept_lang == '*':
            continue
        normalized = to_locale(
            data.normalize_code(data.simplify_to_common(accept_lang)))
        locale_code = to_locale(accept_lang)
        if normalized in reserved or locale_code in reserved:
            continue
        langs = Language.objects.filter(
            code__in=(normalized, locale_code),
            translationproject__project=project,
        )
        if langs.count():
            break
    return langs
def get_alt_src_langs(request, user, translation_project):
    """Return a list of alternative source languages for *user*, or None.

    Anonymous requests get no alternatives.  Configured preferences win;
    otherwise the browser's Accept-Language header is consulted for
    languages that have a translation project for this project.
    """
    if request.user.is_anonymous:
        return
    tp_language = translation_project.language
    project = translation_project.project
    src_language = project.source_language
    preferred = list(
        user.alt_src_langs
            .exclude(id__in=(tp_language.id, src_language.id))
            .filter(translationproject__project=project))
    if preferred:
        return preferred
    # Codes that never make sense as an alternative source here.
    reserved = ('en', 'en_US', src_language.code, tp_language.code)
    header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    for accept_lang, _priority in parse_accept_lang_header(header):
        if accept_lang == '*':
            continue
        normalized = to_locale(
            data.normalize_code(data.simplify_to_common(accept_lang)))
        code = to_locale(accept_lang)
        if code in reserved or normalized in reserved:
            continue
        matches = list(
            Language.objects.filter(
                code__in=(normalized, code),
                translationproject__project=project))
        if matches:
            return matches
def get_alt_src_langs(request, profile, translation_project):
    """Return alternative source languages for *profile*.

    Falls back to the browser's Accept-Language header when the profile
    has no alternative source languages configured at all.
    """
    language = translation_project.language
    project = translation_project.project
    source_language = project.source_language
    langs = profile.alt_src_langs.exclude(
        id__in=(language.id, source_language.id),
    ).filter(translationproject__project=project)
    if profile.alt_src_langs.count():
        return langs
    from pootle_language.models import Language
    # Codes that should never be offered as an alternative source.
    blocked = ("en", "en_US", source_language.code, language.code)
    accept_header = request.META.get("HTTP_ACCEPT_LANGUAGE", "")
    for accept_lang, _priority in parse_accept_lang_header(accept_header):
        if accept_lang == "*":
            continue
        normalized = to_locale(
            data.normalize_code(data.simplify_to_common(accept_lang)))
        code = to_locale(accept_lang)
        if normalized in blocked or code in blocked:
            continue
        langs = Language.objects.filter(
            code__in=(normalized, code),
            translationproject__project=project)
        if langs.count():
            break
    return langs
def get_alt_src_langs(request, user, translation_project):
    """Return alternative source languages for *user* in *translation_project*.

    The user's configured alternative source languages (minus the target
    and source language of this translation project) take precedence;
    when the user has none configured at all, languages from the
    browser's Accept-Language header that have a translation project for
    the same project are used instead.
    """
    language = translation_project.language
    project = translation_project.project
    source_language = project.source_language
    langs = user.alt_src_langs.exclude(
        id__in=(language.id, source_language.id)
    ).filter(translationproject__project=project)
    # exists() issues a cheap EXISTS query instead of the full COUNT(*)
    # the original ran just to test for emptiness.
    if not user.alt_src_langs.exists():
        from pootle_language.models import Language
        # Hoisted out of the loop: codes to skip are loop-invariant.
        skip = ('en', 'en_US', source_language.code, language.code)
        accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
        for accept_lang, unused in parse_accept_lang_header(accept):
            if accept_lang == '*':
                continue
            simplified = data.simplify_to_common(accept_lang)
            normalized = to_locale(data.normalize_code(simplified))
            code = to_locale(accept_lang)
            if normalized in skip or code in skip:
                continue
            langs = Language.objects.filter(
                code__in=(normalized, code),
                translationproject__project=project,
            )
            if langs.exists():
                break
    return langs
def lang_to_table(code):
    """Map a language *code* to a legal table name, or None if illegal."""
    # Normalize to the simplest form first.
    simplified = data.simplify_to_common(code)
    if not data.langcode_ire.match(simplified):
        # Does not look like a language code at all.
        return None
    # Separator characters are not legal in table names.
    return simplified.replace("-", "_").replace("@", "_").lower()
def get_language_supported(lang_code, supported):
    """Return the entry of *supported* that matches *lang_code*, or None."""
    normalized = data.normalize_code(data.simplify_to_common(lang_code))
    if normalized in supported:
        return normalized
    # FIXME: horribly slow way of dealing with languages with @ in them
    return next(
        (lang for lang in supported
         if data.normalize_code(lang) == normalized),
        None)
def lang_to_table(code):
    """Map a language *code* to a legal table name, caching the result.

    Returns None for strings that do not look like language codes.
    """
    try:
        return _table_name_cache[code]
    except KeyError:
        pass
    # Normalize to simplest form.
    simplified = data.simplify_to_common(code)
    if data.langcode_ire.match(simplified):
        # Normalize to a legal table name.
        table_name = simplified.replace("-", "_").replace("@", "_").lower()
    else:
        # Illegal language name.
        table_name = None
    # Fixed: cache the negative result too -- the original only cached
    # successful lookups, so illegal codes were recomputed on every call.
    _table_name_cache[code] = table_name
    return table_name
def __init__(self, filename=None):
    """Load settings, using the given or default filename.

    Raises Exception when the settings file does not exist.
    """
    if filename:
        self.filename = filename
    else:
        self.filename = os.path.join(get_config_dir(), default_config_name)
    if not os.path.isfile(self.filename):
        # Fixed: give the caller something actionable instead of a
        # bare, message-less `raise Exception`.
        raise Exception("cannot find settings file: %s" % self.filename)
    # Guess a default target language from the user's locale.
    self.language["targetlang"] = data.simplify_to_common(get_locale_lang())
    self.config = ConfigParser.RawConfigParser()
    self.read()
def __init__(self, filename=None):
    """Load settings, using the given or default filename"""
    # Fall back to the default config location when no filename given.
    self.filename = filename or os.path.join(
        get_config_dir(), default_config_name)
    if not os.path.isfile(self.filename):
        raise Exception
    # Default target language is guessed from the user's locale.
    self.language["targetlang"] = data.simplify_to_common(get_locale_lang())
    self.config = ConfigParser.RawConfigParser()
    self.read()
def get_locale_lang():
    """Guess the default target language from the user's locale.

    Returns the simplified, commonly used form of the locale's language
    code, or None when it cannot be determined.
    """
    # If we wanted to determine the UI language ourselves, this should work:
    # lang = locale.getdefaultlocale(('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'))[0]
    # if not lang and sys.platform == "darwin":
    #     lang = osx_lang()
    try:
        lang = locale.getdefaultlocale(('LANGUAGE', 'LC_ALL', 'LANG'))[0]
        if not lang and sys.platform == "darwin":
            lang = osx_lang()
        if lang:
            return data.simplify_to_common(lang)
    # Fixed: `except Exception, e` is Python 2-only syntax; `as` is
    # accepted by Python 2.6+ and required by Python 3.
    except Exception as e:
        logging.warning("%s", e)
def get_alt_src_langs(request, profile, language):
    """Return alternative source languages for *profile*.

    Excludes *language* itself; when the profile has none configured,
    falls back to languages from the Accept-Language header.
    """
    langs = profile.alt_src_langs.exclude(id=language.id)
    if langs.count():
        return langs
    header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    wanted = []
    for accept_lang, _unused in parse_accept_lang_header(header):
        if accept_lang == '*':
            continue
        normalized = to_locale(
            data.normalize_code(data.simplify_to_common(accept_lang)))
        # Skip English defaults and the current target language.
        if normalized not in ['en_US', 'en', language.code]:
            wanted.append(normalized)
    if wanted:
        from pootle_language.models import Language
        langs = Language.objects.filter(code__in=wanted)
    return langs
def load(self, langcode):
    """Load language data for *langcode* into this instance.

    Tries the code as-is, then a normalized match, then the simplified
    common form; codes that still cannot be resolved get empty plural
    information.
    """
    # FIXME: what if we get language code with different capitalization?
    if langcode not in self.languages:
        try:
            langcode = self._match_normalized_langcode(langcode)
        except ValueError:
            langcode = data.simplify_to_common(langcode)
            if langcode not in self.languages:
                try:
                    langcode = self._match_normalized_langcode(langcode)
                except ValueError:
                    # Fixed typo ("unkown") and switched to lazy %-args
                    # so the message is only formatted when emitted.
                    logging.info("unknown language %s", langcode)
                    self.name = langcode
                    self.code = langcode
                    self.nplurals = 0
                    self.plural = ""
                    return
    self.name = gettext_lang(self.languages[langcode][0])
    self.code = langcode
    self.nplurals = self.languages[langcode][1]
    self.plural = self.languages[langcode][2]
def get_lang_from_http_header(request, supported):
    """If the user's browser sends a list of preferred languages in the
    HTTP_ACCEPT_LANGUAGE header, parse it into a list. Then walk through
    the list, and for each entry, we check whether we have a matching
    pootle translation project. If so, we return it.

    If nothing is found, return None."""
    header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    for accept_lang, _priority in trans_real.parse_accept_lang_header(header):
        if accept_lang == '*':
            return None
        normalized = data.normalize_code(
            data.simplify_to_common(accept_lang, supported))
        if normalized in ('en-us', 'en'):
            return None
        if normalized in supported:
            return normalized
        # FIXME: horribly slow way of dealing with languages with @ in them
        fallback = next(
            (lang for lang in supported
             if data.normalize_code(lang) == normalized),
            None)
        if fallback is not None:
            return fallback
    return None
def load(self, langcode):
    """Load language data for *langcode* into this instance.

    Tries the code as-is, then a normalized match, then the simplified
    common form (restricted to known languages); codes that still cannot
    be resolved get empty plural information.
    """
    # FIXME: what if we get language code with different capitalization?
    if langcode not in self.languages:
        try:
            langcode = self._match_normalized_langcode(langcode)
        except ValueError:
            langcode = data.simplify_to_common(langcode, self.languages)
            if langcode not in self.languages:
                try:
                    langcode = self._match_normalized_langcode(langcode)
                except ValueError:
                    # Fixed typo ("unkown") and switched to lazy %-args
                    # so the message is only formatted when emitted.
                    logging.info("unknown language %s", langcode)
                    self.name = langcode
                    self.code = langcode
                    self.nplurals = 0
                    self.plural = ""
                    return
    self.name = gettext_lang(self.languages[langcode][0])
    self.code = langcode
    self.nplurals = self.languages[langcode][1]
    self.plural = self.languages[langcode][2]
def test_simplify_to_common():
    """test language code simplification"""
    cases = [
        # Country suffix dropped when redundant...
        ("af_ZA", "af"),
        ("pt_PT", "pt"),
        # ...but kept where it is significant.
        ("pt_BR", "pt_BR"),
    ]
    for code, expected in cases:
        assert data.simplify_to_common(code) == expected