def check_json(self, file_list):
    """Validate that localizable fields in the given .json files pass checks.

    Walks file_list, skipping anything that is not a .json file under both
    ROOT_PATH and self.json_file_root, and runs self._check_string_field on
    every value selected by the 'global' paths in scripts/i18n/loc_fields.json.

    :param file_list: iterable of file paths to examine.
    :returns: None.  NOTE(review): 'passing' is tracked but never returned;
        confirm whether a trailing 'return passing' was intended.
    """
    passing = True
    # Close the loc_fields handle deterministically (it was leaked before).
    with open(self.stonehearth_root + 'scripts/i18n/loc_fields.json') as required_loc_fields:
        LOCALIZABLE_FIELDS = json.load(required_loc_fields)
    for path in file_list:
        if not os.path.isfile(path):
            continue
        if ROOT_PATH not in path:
            continue
        if self.json_file_root not in path:
            continue
        root, f = os.path.split(path)
        if os.path.splitext(f)[1] != '.json':
            continue
        with open(path) as data_file:
            try:
                data = json.load(data_file)
            except Exception as e:
                print('Improper Format: ' + path + str(e))
                passing = False
                # BUG FIX: skip this file -- 'data' is undefined (or stale
                # from a previous iteration) after a parse failure, so the
                # code below must not run.
                continue
            print("checking file " + path)
            file_errors = []
            json_selector = JsonSelector(data)
            for select_path in LOCALIZABLE_FIELDS['global'].keys():
                field_values = json_selector.select_path_values(select_path)
                for value in field_values:
                    result, message = self._check_string_field(value)
                    if not result:
                        file_errors.append(select_path + " Failed: " + message)
            if len(file_errors) > 0:
                print("file " + path + " has localization errors! Run 'make loc' to automatically generate keys.")
                for error in file_errors:
                    print("\t " + error)
                passing = False
def check_json(self, file_list):
    """Check localizable string fields in a list of .json files.

    Only regular files whose path contains both ROOT_PATH and
    self.json_file_root and whose extension is '.json' are inspected.
    Each value matched by a 'global' selector from loc_fields.json is
    validated via self._check_string_field; failures are printed.

    :param file_list: iterable of candidate file paths.
    :returns: None.  NOTE(review): 'passing' is computed but never
        returned -- confirm whether a 'return passing' was intended.
    """
    passing = True
    # Use a context manager so the selector file is always closed
    # (the handle was previously leaked).
    with open(self.stonehearth_root + 'scripts/i18n/loc_fields.json') as required_loc_fields:
        LOCALIZABLE_FIELDS = json.load(required_loc_fields)
    for path in file_list:
        # Guard clauses: only real .json files inside the expected roots.
        if not os.path.isfile(path) or ROOT_PATH not in path or self.json_file_root not in path:
            continue
        root, f = os.path.split(path)
        if os.path.splitext(f)[1] != '.json':
            continue
        with open(path) as data_file:
            try:
                data = json.load(data_file)
            except Exception as e:
                print('Improper Format: ' + path + str(e))
                passing = False
                # BUG FIX: without this continue the loop body went on to
                # use 'data', which is undefined after a parse failure.
                continue
            print("checking file " + path)
            file_errors = []
            json_selector = JsonSelector(data)
            for select_path in LOCALIZABLE_FIELDS['global'].keys():
                for value in json_selector.select_path_values(select_path):
                    result, message = self._check_string_field(value)
                    if not result:
                        file_errors.append(select_path + " Failed: " + message)
            if len(file_errors) > 0:
                print("file " + path + " has localization errors! Run 'make loc' to automatically generate keys.")
                for error in file_errors:
                    print("\t " + error)
                passing = False
def generate_loc_keys(self): self.choice_regex_pattern = re.compile(self.mod + ':') if len(self.file_list) <= 0: return if args.dry_run: loc_file_path = 'generated_loc/' else: loc_file_path = self.mod_path + EN_JSON_PATH try: loc_file = open(loc_file_path + EN_JSON_FILE, 'r+') loc_dictionary = json.load(loc_file, object_pairs_hook=OrderedDict) except: if not os.path.exists(loc_file_path): os.makedirs(loc_file_path) loc_file = open(loc_file_path + EN_JSON_FILE, 'w') loc_dictionary = OrderedDict() loc_fields_data = open(os.path.join(self.python_location, 'loc_fields.json')) self.LOCALIZABLE_FIELDS = json.load(loc_fields_data, object_pairs_hook=OrderedDict) for path in self.file_list: if not self.mod_path in path: continue if not os.path.isfile(path): continue root, f = os.path.split(path) split_root = root.split(self.mod_path + "/") file_name_info = os.path.splitext(f) if len(split_root) > 1: split_root_modified = split_root[1] for original, replacement in TRUNCATED_PATHS.iteritems(): if original == split_root_modified: _print_info(original + ' is in ' + split_root_modified) split_root_modified = replacement break _print_info('split_root_modified ' + split_root_modified) localization_root = split_root_modified.split('/') _print_info('file name = ' + f + " last loc root = " + localization_root[-1]) if file_name_info[0] != localization_root[-1]: localization_root.append(file_name_info[0]) else: localization_root = [file_name_info] if localization_root[0] in INVALID_KEYWORDS: localization_root[0] = self.mod + '_' + localization_root[0] _print_info("loc root " + str(localization_root)) with open(path, 'r+') as data_file: print 'processing file: ' + path modified = False data = json.load(data_file, object_pairs_hook=OrderedDict) json_selector = JsonSelector(data) for field, field_name_format in self.LOCALIZABLE_FIELDS['global'].iteritems(): split_path = field.split('.') field_name = split_path.pop() found_parents = json_selector.select_path_parents(field) if 
len(found_parents) > 0: _print_info("select path " + field + " returned " + str(found_parents)) for unique_selector_path, found in found_parents.iteritems(): _print_info("unique_selector_path : " + unique_selector_path + " found: " + str(found) + " found type: " + str(type(found))) if field_name == '*' and (not isinstance(found, basestring)): if isinstance(found, dict): for sub_name, item in found.iteritems(): has_mod = self._try_add_field(sub_name, localization_root, field_name_format, unique_selector_path + sub_name + '/', found, loc_dictionary) if has_mod: modified = True else: for i in range(0, len(found)): sub_name = '{0:03d}'.format(i) has_mod = self._try_add_field(sub_name, localization_root, field_name_format, unique_selector_path + sub_name + '/', found, loc_dictionary, found_parent_index=i) if has_mod: modified = True elif isinstance(found, dict): has_mod = self._try_add_field(field_name, localization_root, field_name_format, unique_selector_path, found, loc_dictionary) if has_mod: modified = True else: _print_info("Found a Non-dictionary parent for field " + unique_selector_path + ". 
Cannot convert into localized string.") if modified and field == "components.unit_info.name": found['display_name'] = found['name'] found.pop('name') # Do special formatting for choices for select_path, select_format in self.LOCALIZABLE_FIELDS['choices'].iteritems(): parents = json_selector.select_path_parents(select_path) if len(parents) > 0: for unique_selector_path, found in parents.iteritems(): count = 0 for key, value in found.iteritems(): sub_name = '{0:03d}'.format(count) has_mod = self._try_add_choice(key, sub_name, localization_root, select_format, unique_selector_path, found, loc_dictionary) if has_mod: modified = True count = count + 1 if modified: _print_info("modifying file " + f) if not args.dry_run: data_file_str = json.dumps(data, indent=3, separators=(',', ': ')) data_file.seek(0) data_file.truncate() data_file.write(data_file_str) loc_file.seek(0) loc_file.truncate() json_string = json.dumps(loc_dictionary, indent=3, separators=(',', ': ')) loc_file.write(json_string) print 'localization key generation complete'
def generate_loc_keys(self):
    """Replace localizable values in the mod's .json files with generated keys.

    The extracted strings are accumulated into an OrderedDict and written to
    the English locale file (EN_JSON_FILE).  Source files are only rewritten
    when not running with args.dry_run; a dry run writes the locale output to
    'generated_loc/' instead of the mod path.
    """
    self.choice_regex_pattern = re.compile(self.mod + ':')
    if len(self.file_list) <= 0:
        return
    if args.dry_run:
        loc_file_path = 'generated_loc/'
    else:
        loc_file_path = self.mod_path + EN_JSON_PATH
    try:
        loc_file = open(loc_file_path + EN_JSON_FILE, 'r+')
        loc_dictionary = json.load(loc_file, object_pairs_hook=OrderedDict)
    except (IOError, OSError, ValueError):
        # Locale file missing or unparsable: create it and start empty.
        # (Narrowed from a bare 'except:' to the failures open/json.load
        # can actually raise.)
        if not os.path.exists(loc_file_path):
            os.makedirs(loc_file_path)
        loc_file = open(loc_file_path + EN_JSON_FILE, 'w')
        loc_dictionary = OrderedDict()
    # 'with' closes the selector-definition file (handle was leaked before).
    with open(os.path.join(self.python_location, 'loc_fields.json')) as loc_fields_data:
        self.LOCALIZABLE_FIELDS = json.load(loc_fields_data, object_pairs_hook=OrderedDict)
    for path in self.file_list:
        if not self.mod_path in path:
            continue
        if not os.path.isfile(path):
            continue
        root, f = os.path.split(path)
        split_root = root.split(self.mod_path + "/")
        file_name_info = os.path.splitext(f)
        if len(split_root) > 1:
            # Build the key prefix from the sub-path below the mod root,
            # honoring TRUNCATED_PATHS aliases.
            split_root_modified = split_root[1]
            for original, replacement in TRUNCATED_PATHS.iteritems():
                if original == split_root_modified:
                    _print_info(original + ' is in ' + split_root_modified)
                    split_root_modified = replacement
                    break
            _print_info('split_root_modified ' + split_root_modified)
            localization_root = split_root_modified.split('/')
            _print_info('file name = ' + f + " last loc root = " + localization_root[-1])
            if file_name_info[0] != localization_root[-1]:
                localization_root.append(file_name_info[0])
        else:
            # BUG FIX: take the basename string; the original stored the
            # whole (root, ext) tuple, so the keyword check below never
            # matched and str + tuple concatenation would raise TypeError.
            localization_root = [file_name_info[0]]
        if localization_root[0] in INVALID_KEYWORDS:
            localization_root[0] = self.mod + '_' + localization_root[0]
        _print_info("loc root " + str(localization_root))
        with open(path, 'r+') as data_file:
            print('processing file: ' + path)
            modified = False
            data = json.load(data_file, object_pairs_hook=OrderedDict)
            json_selector = JsonSelector(data)
            for field, field_name_format in self.LOCALIZABLE_FIELDS['global'].iteritems():
                split_path = field.split('.')
                field_name = split_path.pop()
                found_parents = json_selector.select_path_parents(field)
                if len(found_parents) > 0:
                    _print_info("select path " + field + " returned " + str(found_parents))
                    for unique_selector_path, found in found_parents.iteritems():
                        _print_info("unique_selector_path : " + unique_selector_path + " found: " + str(found) + " found type: " + str(type(found)))
                        if field_name == '*' and (not isinstance(found, basestring)):
                            if isinstance(found, dict):
                                # Wildcard selector over a dict -> localize each child.
                                for sub_name, item in found.iteritems():
                                    if self._try_add_field(sub_name, localization_root, field_name_format, unique_selector_path + sub_name + '/', found, loc_dictionary):
                                        modified = True
                            else:
                                # Wildcard selector over a list -> index-based key parts.
                                for i in range(0, len(found)):
                                    sub_name = '{0:03d}'.format(i)
                                    if self._try_add_field(sub_name, localization_root, field_name_format, unique_selector_path + sub_name + '/', found, loc_dictionary, found_parent_index=i):
                                        modified = True
                        elif isinstance(found, dict):
                            if self._try_add_field(field_name, localization_root, field_name_format, unique_selector_path, found, loc_dictionary):
                                modified = True
                        else:
                            _print_info("Found a Non-dictionary parent for field " + unique_selector_path + ". Cannot convert into localized string.")
                # NOTE(review): uses 'found' left over from the loop above
                # (the unit_info component); indentation in the flattened
                # source was ambiguous -- confirm placement.
                if modified and field == "components.unit_info.name":
                    found['display_name'] = found['name']
                    found.pop('name')
            # Do special formatting for choices
            for select_path, select_format in self.LOCALIZABLE_FIELDS['choices'].iteritems():
                parents = json_selector.select_path_parents(select_path)
                if len(parents) > 0:
                    for unique_selector_path, found in parents.iteritems():
                        count = 0
                        for key, value in found.iteritems():
                            sub_name = '{0:03d}'.format(count)
                            if self._try_add_choice(key, sub_name, localization_root, select_format, unique_selector_path, found, loc_dictionary):
                                modified = True
                            count = count + 1
            if modified:
                _print_info("modifying file " + f)
                if not args.dry_run:
                    # Overwrite the source json in place with the new keys.
                    data_file_str = json.dumps(data, indent=3, separators=(',', ': '))
                    data_file.seek(0)
                    data_file.truncate()
                    data_file.write(data_file_str)
    # Flush the collected locale entries and close the handle
    # (the file was previously left open).
    loc_file.seek(0)
    loc_file.truncate()
    json_string = json.dumps(loc_dictionary, indent=3, separators=(',', ': '))
    loc_file.write(json_string)
    loc_file.close()
    print('localization key generation complete')