def load(name, merge_inherited=True):
    """Load the locale data for the given locale.

    The locale data is a dictionary that contains much of the data defined by
    the Common Locale Data Repository (CLDR). This data is stored as a
    collection of pickle files inside the ``babel`` package.

    >>> d = load('en_US')
    >>> d['languages']['sv']
    u'Swedish'

    Note that the results are cached, and subsequent requests for the same
    locale return the same dictionary:

    >>> d1 = load('en_US')
    >>> d2 = load('en_US')
    >>> d1 is d2
    True

    :param name: the locale identifier string (or "root")
    :param merge_inherited: whether the inherited data should be merged into
                            the data of the requested locale
    :raise `IOError`: if no locale data file is found for the given locale
                      identifier, or one of the locales it inherits from
    """
    _cache_lock.acquire()
    try:
        data = _cache.get(name)
        if not data:
            # Load inherited data
            if name == 'root' or not merge_inherited:
                data = {}
            else:
                from babel.core import get_global
                parent = get_global('parent_exceptions').get(name)
                if not parent:
                    parts = name.split('_')
                    if len(parts) == 1:
                        parent = 'root'
                    else:
                        parent = '_'.join(parts[:-1])
                data = load(parent).copy()
            filename = os.path.join(_dirname, '%s.dat' % name)
            fileobj = open(filename, 'rb')
            try:
                if name != 'root' and merge_inherited:
                    merge(data, pickle.load(fileobj))
                else:
                    data = pickle.load(fileobj)
                _cache[name] = data
            finally:
                fileobj.close()
        return data
    finally:
        _cache_lock.release()
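# A minimal, self-contained sketch of the parent-chain lookup performed by
# load() above. `locale_chain` is a hypothetical helper written only for
# illustration; the 'parent_exceptions' lookup and the '_'-splitting rule are
# the two pieces taken from the code above.
def locale_chain(name, parent_exceptions=None):
    """Yield the locale and each ancestor it inherits data from, ending at 'root'."""
    parent_exceptions = parent_exceptions or {}
    while name != 'root':
        yield name
        parent = parent_exceptions.get(name)
        if not parent:
            parts = name.split('_')
            parent = 'root' if len(parts) == 1 else '_'.join(parts[:-1])
        name = parent
    yield 'root'

# For example, list(locale_chain('en_US')) gives ['en_US', 'en', 'root'];
# load() resolves this chain recursively and merges each locale's pickled
# data on top of its parent's.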
def get_global(key):
    """Return the dictionary for the given key in the global data.

    The global data is stored in the ``babel/global.dat`` file and contains
    information independent of individual locales.

    >>> get_global('zone_aliases')['UTC']
    u'Etc/GMT'
    >>> get_global('zone_territories')['Europe/Berlin']
    u'DE'

    :param key: the data key
    :return: the dictionary found in the global data under the given key
    :rtype: `dict`
    :since: version 0.9
    """
    global _global_data
    if _global_data is None:
        dirname = os.path.join(os.path.dirname(__file__))
        filename = os.path.join(dirname, 'global.dat')
        if not os.path.isfile(filename):
            _raise_no_data_error()
        fileobj = open(filename, 'rb')
        try:
            _global_data = pickle.load(fileobj)
        finally:
            fileobj.close()
    return _global_data.get(key, {})
def get_global(key):
    """Return the dictionary for the given key in the global data.

    The global data is stored in the ``babel/global.dat`` file and contains
    information independent of individual locales.

    >>> get_global('zone_aliases')['UTC']
    u'Etc/GMT'
    >>> get_global('zone_territories')['Europe/Berlin']
    u'DE'

    .. versionadded:: 0.9

    :param key: the data key
    """
    global _global_data
    if _global_data is None:
        dirname = os.path.join(os.path.dirname(__file__))
        filename = os.path.join(dirname, 'global.dat')
        if not os.path.isfile(filename):
            _raise_no_data_error()
        fileobj = open(filename, 'rb')
        try:
            _global_data = pickle.load(fileobj)
        finally:
            fileobj.close()
    return _global_data.get(key, {})
def need_conversion(dst_filename, data_dict, source_filename):
    with open(source_filename, 'rb') as f:
        blob = f.read(4096)
        version = int(re.search(b'version number="\\$Revision: (\\d+)', blob).group(1))

    data_dict['_version'] = version
    if not os.path.isfile(dst_filename):
        return True

    with open(dst_filename, 'rb') as f:
        data = pickle.load(f)

    return data.get('_version') != version
def need_conversion(dst_filename, data_dict, source_filename):
    with open(source_filename, 'rb') as f:
        blob = f.read(4096)
        version = int(
            re.search(b'version number="\\$Revision: (\\d+)', blob).group(1))

    data_dict['_version'] = version
    if not os.path.isfile(dst_filename):
        return True

    with open(dst_filename, 'rb') as f:
        data = pickle.load(f)

    return data.get('_version') != version
def need_conversion(dst_filename, data_dict, source_filename):
    with open(source_filename, 'rb') as f:
        blob = f.read(4096)
        version_match = re.search(b'version number="\\$Revision: (\\d+)', blob)
        if not version_match:
            # CLDR 36.0 was shipped without proper revision numbers
            return True
        version = int(version_match.group(1))

    data_dict['_version'] = version
    if not os.path.isfile(dst_filename):
        return True

    with open(dst_filename, 'rb') as f:
        data = pickle.load(f)

    return data.get('_version') != version
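# A hedged usage sketch for need_conversion() above. `convert_if_needed`, the
# file-name arguments, and the elided parsing step are assumptions written for
# illustration, not taken from the actual import driver; the sketch relies on
# the same module-level `os`, `re` and `pickle` imports as need_conversion().
def convert_if_needed(source_xml, dst_dat):
    data = {}
    if not need_conversion(dst_dat, data, source_xml):
        return False  # the pickled data already matches the CLDR source revision
    # ... parse source_xml and fill `data` here ...
    with open(dst_dat, 'wb') as outfile:
        pickle.dump(data, outfile, 2)
    return True

# Note that need_conversion() records the source revision in `data` under
# '_version', so the freshly written pickle carries the version that the next
# run compares against.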
def get_global(key):
    """Return the dictionary for the given key in the global data.

    The global data is stored in the ``babel/global.dat`` file and contains
    information independent of individual locales.

    >>> get_global('zone_aliases')['UTC']
    u'Etc/GMT'
    >>> get_global('zone_territories')['Europe/Berlin']
    u'DE'

    The keys available are:

    - ``currency_fractions``
    - ``language_aliases``
    - ``likely_subtags``
    - ``parent_exceptions``
    - ``script_aliases``
    - ``territory_aliases``
    - ``territory_currencies``
    - ``territory_languages``
    - ``territory_zones``
    - ``variant_aliases``
    - ``win_mapping``
    - ``zone_aliases``
    - ``zone_territories``

    .. note:: The internal structure of the data may change between versions.

    .. versionadded:: 0.9

    :param key: the data key
    """
    global _global_data
    if _global_data is None:
        dirname = os.path.join(os.path.dirname(__file__))
        filename = os.path.join(dirname, 'global.dat')
        if not os.path.isfile(filename):
            _raise_no_data_error()
        fileobj = open(filename, 'rb')
        try:
            _global_data = pickle.load(fileobj)
        finally:
            fileobj.close()
    return _global_data.get(key, {})
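# A small usage sketch for get_global() above, assuming the babel.core entry
# point that load() also imports earlier in this section; the values in the
# comments are the ones shown by the docstring examples, not re-verified here.
from babel.core import get_global

zone_aliases = get_global('zone_aliases')          # zone_aliases['UTC'] == u'Etc/GMT'
zone_territories = get_global('zone_territories')  # zone_territories['Europe/Berlin'] == u'DE'
missing = get_global('no_such_key')                # unknown keys fall back to {}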
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Ad-hoc script for inspecting the pickled locale and global data files.
from babel._compat import pickle

dst_filename = "../babel/localedata/zh_Hant.dat"  # en.dat
with open(dst_filename, 'rb') as f:
    data = pickle.load(f)
    # print(data.get('territories'))
    # print(data.get('territories_long'))
    print(data.get('territories')['HK'])
    print(data.get('territories')['MK'])
    print(data.get('territories_long'))

dst_filename = "../babel/global.dat"
with open(dst_filename, 'rb') as f:
    data_g = pickle.load(f)
    print(data_g.get('territory_containment_extra'))
    print(data_g.get('territory_containment'))
    # Northern America
    print(data_g.get('territory_containment').get('021'))
    # North America
    print(data_g.get('territory_containment_extra').get('grouping').get('003'))