def parse(path, source_path=None, locale=None):
    try:
        pofile = polib.pofile(path, wrapwidth=200)
    except OSError as err:
        raise ParseError(f"Failed to parse {path}: {err}")
    return POResource(pofile)
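# A minimal usage sketch for the parser above. The path "locale/de/firefox.po"
# is hypothetical, and POResource is assumed to simply wrap the polib.POFile
# shown here; polib entries expose .msgid and .msgstr.
import polib

pofile = polib.pofile("locale/de/firefox.po", wrapwidth=200)
for entry in pofile:
    print(entry.msgid, "->", entry.msgstr)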
def save(self, locale):
    """
    Load the source resource, modify it with changes made to this
    Resource instance, and save it over the locale-specific resource.
    """
    if not self.source_resource:
        raise SyncError(
            'Cannot save JSON resource {0}: No source resource given.'
            .format(self.path)
        )

    with codecs.open(self.source_resource.path, 'r', 'utf-8') as resource:
        json_file = json.load(resource, object_pairs_hook=OrderedDict)
        try:
            validate(json_file, SCHEMA)
        except ValidationError as e:
            raise ParseError(e)

    # Iterate over a copy, leaving original free to modify
    for key, value in json_file.copy().items():
        entity = self.entities[key]
        if entity.strings:
            json_file[key]['message'] = entity.strings[None]
        else:
            del json_file[key]

    create_parent_directory(self.path)

    with codecs.open(self.path, 'w+', 'utf-8') as f:
        log.debug('Saving file: %s', self.path)
        f.write(json.dumps(json_file, ensure_ascii=False, indent=2))
        f.write('\n')  # Add newline
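# A hedged sketch of the file shape save() expects: a WebExtension-style
# messages.json where every key maps to an object with a "message" string.
# SCHEMA_EXAMPLE is an illustrative stand-in, not the project's actual SCHEMA.
import json
from collections import OrderedDict
from jsonschema import validate

SCHEMA_EXAMPLE = {
    "type": "object",
    "additionalProperties": {
        "type": "object",
        "properties": {"message": {"type": "string"}},
        "required": ["message"],
    },
}

source = json.loads(
    '{"greeting": {"message": "Hello", "description": "Shown on start"}}',
    object_pairs_hook=OrderedDict,
)
validate(source, SCHEMA_EXAMPLE)  # Raises ValidationError on a malformed file.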
def parse(path, source_path=None, locale=None):
    try:
        pofile = polib.pofile(path, wrapwidth=200)
    except IOError as err:
        raise ParseError(u'Failed to parse {path}: {err}'.format(path=path, err=err))
    return POResource(pofile)
def __init__(self, path, source_resource=None):
    self.path = path
    self.entities = {}
    self.source_resource = source_resource

    # Copy entities from the source_resource if it's available.
    if source_resource:
        for key, entity in source_resource.entities.items():
            data = copy.copy(entity.data)
            data["message"] = None
            self.entities[key] = JSONEntity(
                entity.order,
                entity.key,
                data,
            )

    try:
        with codecs.open(path, "r", "utf-8") as resource:
            self.json_file = json.load(resource, object_pairs_hook=OrderedDict)
            validate(self.json_file, SCHEMA)
    except (IOError, ValueError, ValidationError) as err:
        # If the file doesn't exist or cannot be decoded,
        # but we have a source resource,
        # we can keep going, we'll just not have any translations.
        if source_resource:
            return
        else:
            raise ParseError(err)

    for order, (key, data) in enumerate(self.json_file.items()):
        self.entities[key] = JSONEntity(
            order,
            key,
            data,
        )
def __init__(self, path, source_resource=None):
    self.path = path
    self.entities = OrderedDict()  # Preserve entity order.
    self.source_resource = source_resource

    try:
        self.parser = parser.getParser(self.path)
    except UserWarning as err:
        raise ParseError(err)

    self.parsed_objects = []

    # A monolingual l10n file might not contain all entities, but the code
    # expects ParsedResource to contain representations of all of them. So
    # when parsing the l10n resource, we first create an empty entity for
    # each source resource entity.
    if source_resource:
        for key, entity in source_resource.entities.items():
            self.entities[key] = CompareLocalesEntity(
                entity.key,
                None,
                None,
                0,
            )

    try:
        self.parser.readFile(self.path)
    except IOError as err:
        # If the file doesn't exist, but we have a source resource,
        # we can keep going, we'll just not have any translations.
        if source_resource:
            return
        else:
            raise ParseError(err)

    self.parsed_objects = list(self.parser.walk())
    order = 0

    for entity in self.parsed_objects:
        if isinstance(entity, parser.Entity):
            self.entities[entity.key] = CompareLocalesEntity(
                entity.key,
                entity.unwrap(),
                entity.pre_comment,
                order,
            )
            order += 1
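# A minimal sketch of the compare-locales calls used above, with a hypothetical
# "strings.properties" path. getParser picks a parser by file extension and
# raises UserWarning for unsupported files, which is why __init__ wraps it in
# ParseError.
from compare_locales import parser

p = parser.getParser("strings.properties")  # hypothetical .properties file
p.readFile("strings.properties")
for obj in p.walk():
    if isinstance(obj, parser.Entity):
        print(obj.key, "=", obj.unwrap())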
def parse(path, source_path=None):
    try:
        pofile = polib.pofile(path)
    except IOError as err:
        wrapped = ParseError(u'Failed to parse {path}: {err}'.format(path=path, err=err))
        # Python 2-style re-raise that keeps the original traceback attached.
        raise wrapped, None, sys.exc_info()[2]
    return POResource(pofile)
def parse(path, source_path=None, locale=None):
    with open(path) as f:
        xml = f.read().encode("utf-8")

    try:
        xliff_file = xliff.xlifffile(xml)
    except etree.XMLSyntaxError as err:
        raise ParseError(f"Failed to parse {path}: {err}")

    return XLIFFResource(path, xliff_file)
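# A hedged usage sketch of the translate-toolkit call above, using a
# hypothetical "app.xliff" path. Translation units expose .source and .target,
# which is presumably what XLIFFResource builds its entities from.
from translate.storage import xliff

with open("app.xliff") as f:
    xliff_file = xliff.xlifffile(f.read().encode("utf-8"))

for unit in xliff_file.units:
    print(unit.source, "->", unit.target)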
def parse(path, source_path=None, locale=None):
    # Read as utf-8-sig in case there's a BOM at the start of the file
    # that we want to remove.
    with codecs.open(path, "r", "utf-8-sig") as f:
        content = f.read()

    try:
        children = LangVisitor().parse(content)
    except (ParsimoniousParseError, VisitationError) as err:
        raise ParseError(f"Failed to parse {path}: {err}") from err

    return LangResource(path, children)
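# A hedged illustration of the .lang syntax this parser consumes: a ";"-prefixed
# source string followed by its translation on the next line, plus "#" comment
# lines. The sample content is hypothetical; parse() refers to the function above.
import tempfile

sample = "# Browser strings\n;Download Firefox\nFirefox herunterladen\n"
with tempfile.NamedTemporaryFile("w", suffix=".lang", encoding="utf-8", delete=False) as f:
    f.write(sample)

resource = parse(f.name)  # LangResource whose children hold the source/translation pair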
def __init__(self, parser, path, source_resource=None):
    self.parser = parser
    self.path = path
    self.source_resource = source_resource
    self.entities = OrderedDict()  # Preserve entity order.

    # Bug 1193860: unescape quotes in some files
    self.escape_quotes_on = "mobile/android/base" in path and parser is DTDParser

    # Copy entities from the source_resource if it's available.
    if source_resource:
        for key, entity in source_resource.entities.items():
            self.entities[key] = copy_source_entity(entity)

    try:
        # Only uncomment MOZ_LANGPACK_CONTRIBUTORS if this is a .inc
        # file and a source resource (i.e. it has no source resource
        # itself).
        self.structure = parser.get_structure(
            read_file(
                path,
                uncomment_moz_langpack=parser is IncParser and not source_resource,
            )
        )
    # Parse errors are handled gracefully by silme
    # No need to catch them here
    except OSError as err:
        # If the file doesn't exist, but we have a source resource,
        # we can keep going, we'll just not have any translations.
        if source_resource:
            return
        else:
            raise ParseError(err)

    comments = []
    current_order = 0
    for obj in self.structure:
        if isinstance(obj, silme.core.entity.Entity):
            if self.escape_quotes_on:
                obj.value = unescape_quotes(obj.value)
            entity = SilmeEntity(obj, comments, current_order)
            self.entities[entity.key] = entity
            current_order += 1
            comments = []
        elif isinstance(obj, silme.core.structure.Comment):
            for comment in obj:
                # Silme groups comments together, so we strip
                # whitespace and split them up.
                lines = str(comment).strip().split("\n")
                comments += [line.strip() for line in lines]
def parse(path, source_path=None):
    # Read as utf-8-sig in case there's a BOM at the start of the file
    # that we want to remove.
    with codecs.open(path, 'r', 'utf-8-sig') as f:
        content = f.read()

    try:
        children = LangVisitor().parse(content)
    except (ParsimoniousParseError, VisitationError) as err:
        wrapped = ParseError(u'Failed to parse {path}: {err}'.format(path=path, err=err))
        # Python 2-style re-raise that keeps the original traceback attached.
        raise wrapped, None, sys.exc_info()[2]  # NOQA
    return LangResource(path, children)
def open_json_file(self, path, SCHEMA, source_resource=None):
    try:
        with codecs.open(path, "r", "utf-8") as resource:
            json_file = json.load(resource, object_pairs_hook=OrderedDict)
            validate(json_file, SCHEMA)
            return json_file
    except (OSError, ValueError, ValidationError) as err:
        # If the file doesn't exist or cannot be decoded,
        # but we have a source resource,
        # we can keep going, we'll just not have any translations.
        if source_resource:
            return {}
        else:
            raise ParseError(err)
def vcs_resource_constructor(project, path, locales=None):
    if path == 'failure':
        raise ParseError('error message')
    else:
        return 'successful resource'
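# A hedged sketch of how a stub like this is typically wired into a test, e.g.
# as the side_effect of a patched resource constructor; the patch target below
# is hypothetical.
from unittest.mock import patch

with patch("pontoon.sync.vcs.models.VCSResource", side_effect=vcs_resource_constructor):
    ...  # code under test constructs resources and hits the stub instead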
def __init__(self, path, locale, source_resource=None):
    self.path = path
    self.locale = locale
    self.entities = {}
    self.source_resource = source_resource
    self.order = 0

    # Copy entities from the source_resource if it's available.
    if source_resource:
        for key, entity in source_resource.entities.items():
            self.entities[key] = FTLEntity(
                entity.key,
                "",
                "",
                {},
                copy.copy(entity.comments),
                copy.copy(entity.group_comments),
                copy.copy(entity.resource_comments),
                entity.order,
            )

    try:
        with codecs.open(path, "r", "utf-8") as resource:
            self.structure = parser.parse(resource.read())
    # Parse errors are handled gracefully by fluent
    # No need to catch them here
    except OSError as err:
        # If the file doesn't exist, but we have a source resource,
        # we can keep going, we'll just not have any translations.
        if source_resource:
            return
        else:
            raise ParseError(err)

    group_comment = []
    resource_comment = []
    for obj in self.structure.body:
        if isinstance(obj, localizable_entries):
            key = get_key(obj)
            comment = [obj.comment.content] if obj.comment else []

            # Do not store comments in the string column
            obj.comment = None
            translation = serializer.serialize_entry(obj)

            self.entities[key] = FTLEntity(
                key,
                translation,
                "",
                {None: translation},
                comment,
                group_comment,
                resource_comment,
                self.order,
            )
            self.order += 1
        elif isinstance(obj, ast.GroupComment):
            group_comment = [obj.content]
        elif isinstance(obj, ast.ResourceComment):
            resource_comment += [obj.content]
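# A minimal sketch of the fluent.syntax objects used above. FluentParser and
# FluentSerializer are the python-fluent entry points; the message body here is
# illustrative.
from fluent.syntax import FluentParser, FluentSerializer, ast

structure = FluentParser().parse("hello = Hello, world!\n")
serializer = FluentSerializer()
for obj in structure.body:
    if isinstance(obj, ast.Message):
        print(obj.id.name, "->", serializer.serialize_entry(obj))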