def match_for_index (index, interps_orig, n_can_miss=0):
    # Compare the interpolation placeholders found in the original text
    # against those in the translation at plural form 'index'.
    # Appends highlight notes to 'hl' and returns the number of problems.
    # NOTE: nested function — 'msg' and 'hl' are closures over the
    # enclosing scope; '_collect_interps' extracts $-placeholders.
    nproblems = 0
    interps_trans = _collect_interps(msg.msgstr[index])
    if interps_orig != interps_trans:
        interps_missing = interps_orig.difference(interps_trans)
        # Eliminate from check interpolations explicitly ignored
        # through translator comments of the form
        # "ignore-interpolations: name1 name2 ...".
        for cmnt in [x.strip() for x in msg.manual_comment]:
            if cmnt.startswith("ignore-interpolations:"):
                interps = cmnt[cmnt.find(":") + 1:].split()
                for interp in interps:
                    interp = interp.strip()
                    # Listed names may be given without the leading "$".
                    if not interp.startswith("$"):
                        interp = "$%s" % interp
                    if interp in interps_missing:
                        interps_missing.remove(interp)
        interps_unknown = interps_trans.difference(interps_orig)
        # Missing placeholders are reported only above the allowed
        # slack 'n_can_miss'; unknown ones only if nothing is missing.
        if interps_missing and len(interps_missing) > n_can_miss:
            vfmt = format_item_list(interps_missing)
            hl.append(("msgstr", index,
                       [(None, None,
                         _("@info",
                           "Missing interpolations: %(interplist)s.",
                           interplist=vfmt))]))
            nproblems += 1
        elif interps_unknown:
            vfmt = format_item_list(interps_unknown)
            hl.append(("msgstr", index,
                       [(None, None,
                         _("@info",
                           "Unknown interpolations: %(interplist)s.",
                           interplist=vfmt))]))
            nproblems += 1
    return nproblems
def __init__ (self, params):
    """
    Record sieve parameters and validate the requested check subset.
    """
    self.strict = params.strict
    self.showmsg = params.showmsg
    self.lokalize = params.lokalize

    # Reduce to user-selected checks, rejecting unknown names outright.
    self.selected_checks = None
    if params.check is not None:
        unknown_checks = [chname for chname in params.check
                          if chname not in _known_checks]
        if unknown_checks:
            fmtchecks = format_item_list(unknown_checks)
            raise SieveError(
                _("@info",
                  "Unknown checks selected: %(chklist)s.",
                  chklist=fmtchecks))
        self.selected_checks = set(params.check)

    # Indicators to the caller:
    self.caller_sync = False # no need to sync catalogs to the caller
    self.caller_monitored = False # no need for monitored messages

    self.nproblems = 0
def setup_sieve (p):
    """
    Define description and parameters of the KDE TP check sieve.

    @param p: sieve parameter setup object provided by the framework
    """
    p.set_desc(_("@info sieve discription",
        "Check validity of messages in catalogs within KDE Translation Project."
    ))
    p.add_param("strict", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Check translations strictly: report problems in translation regardless "
        "of whether original itself is valid (default is to check translation "
        "only if original passes checks)."
    ))
    # Idiomatic one-step sorted list instead of keys() + in-place sort().
    chnames = sorted(_known_checks.keys())
    p.add_param("check", unicode, seplist=True,
                metavar=_("@info sieve parameter value placeholder",
                          "KEYWORD,..."),
                desc=_("@info sieve parameter discription",
        "Run only this check instead of all (currently available: %(chklist)s). "
        "Several checks can be specified as a comma-separated list.",
        chklist=format_item_list(chnames)
    ))
    p.add_param("showmsg", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Also show the full message that had some problems."
    ))
    add_param_poeditors(p)
def spell_error(msg, cat, faultyWord, suggestions):
    """
    Report one spelling problem to standard output.

    Prints a separator line, the message's catalog location, its context
    (when present), the misspelled word, and any replacement suggestions.

    @param msg: pology.message.Message object
    @param cat: pology.catalog.Catalog object
    @param faultyWord: badly spelled word
    @param suggestions: list of correct words to suggest
    """
    report("-" * 40)
    location = ColorString("<bold>%s:%d(%d)</bold>") % (cat.filename,
                                                        msg.refline,
                                                        msg.refentry)
    report(location)
    if msg.msgctxt:
        report(_("@info",
                 "<bold>Context:</bold> %(snippet)s",
                 snippet=msg.msgctxt))
    #TODO: color in red part of context that make the mistake
    report(_("@info",
             "<bold>Faulty word:</bold> <red>%(word)s</red>",
             word=faultyWord))
    if suggestions:
        report(_("@info",
                 "<bold>Suggestions:</bold> %(wordlist)s",
                 wordlist=format_item_list(suggestions)))
def _check_qtdt_w (msgstr, msg, cat):
    """
    Check Qt date-format consistency of one translation text.

    Returns a list of problem spans: empty when the message is not a
    Qt date-format message or when original and translation contain
    the same set of format fields, otherwise a single full-text span
    carrying the mismatch note.
    """
    if not _is_qtdt_msg(msg):
        return []

    # Collect format fields from both sides and compare as sets.
    msgid_fmts = _qtdt_parse(msg.msgid)
    msgstr_fmts = _qtdt_parse(msgstr)
    if set(msgid_fmts) == set(msgstr_fmts):
        return []

    errmsg = _("@info",
               "Qt date-format mismatch: "
               "original contains fields {%(fieldlist1)s} "
               "while translation contains {%(fieldlist2)s}.",
               fieldlist1=format_item_list(sorted(msgid_fmts)),
               fieldlist2=format_item_list(sorted(msgstr_fmts)))
    return [(None, None, errmsg)]
def process(self, msg, cat):
    """
    Resolve known entities in every plural form of the translation,
    counting resolutions and warning about entities left unresolved.
    """
    for i, text in enumerate(msg.msgstr):
        msg.msgstr[i], resolved, unknown = resolve_entities(
            text, self.entities, self.ignored_entities, cat.filename)
        self.nresolved += len(resolved)
        if unknown:
            warning_on_msg(
                _("@info",
                  "Unknown entities in translation: "
                  "%(entlist)s.",
                  entlist=format_item_list(unknown)),
                msg, cat)
def _check_wml (msg, cat, strict, hl):
    """
    Check WML markup and link consistency of a message.

    Appends highlight specifications to hl and returns the number of
    problems found. Messages not detected as WML, or whose original
    text is itself invalid WML, are not checked.
    """
    if _detect_markup(msg, cat) != "wml":
        return 0

    # Validate WML in original and collect links.
    # If the original is not valid, do not check translation.
    spans_orig, links_orig = _check_wml_text(msg.msgid)
    if spans_orig:
        return 0

    nproblems = 0
    links_trans = set()
    for i, text in enumerate(msg.msgstr):
        spans, links = _check_wml_text(text)
        if spans:
            # Invalid WML in the translation itself.
            hl.append(("msgstr", i, spans))
            nproblems += len(spans)
            continue
        if links == links_orig:
            continue
        # Valid WML, but the link sets differ; report missing links
        # first, unknown ones only if none are missing.
        links_missing = links_orig.difference(links)
        links_unknown = links.difference(links_orig)
        if links_missing:
            vfmt = format_item_list(links_missing)
            hl.append(("msgstr", i,
                       [(None, None,
                         _("@info",
                           "Missing links: %(linklist)s.",
                           linklist=vfmt))]))
            nproblems += 1
        elif links_unknown:
            vfmt = format_item_list(links_unknown)
            hl.append(("msgstr", i,
                       [(None, None,
                         _("@info",
                           "Unknown links: %(linklist)s.",
                           linklist=vfmt))]))
            nproblems += 1

    return nproblems
def _construct_enchant(provider, lang, envs, encoding, variety, suponly):
    # Set up an Enchant-backed spell checker for the given language and
    # variety, preferring the requested provider.
    # NOTE(review): no return statement is visible in this chunk; the
    # function body appears to continue beyond what is shown here —
    # confirm against the full file. 'encoding' is unused in the
    # visible part.

    # Get Pology's internal personal dictionary for this language.
    dictpath, temporary = _compose_personal_dict(lang, envs)

    if not suponly:
        # 'suponly' presumably means "supplemental dictionaries only",
        # skipping the system Enchant dictionary — TODO confirm with
        # callers.
        try:
            import enchant
        except ImportError:
            pkgs = ["python-enchant"]
            raise PologyError(
                _("@info",
                  "Python wrapper for Enchant not found, "
                  "please install it (possible package names: "
                  "%(pkglist)s).",
                  pkglist=format_item_list(pkgs)))

        # Create Enchant broker.
        try:
            broker = enchant.Broker()
        except Exception, e:
            raise PologyError(
                _("@info",
                  "Cannot initialize Enchant:\n%(msg)s",
                  msg=e))

        # Find Enchant language: the specific variety takes precedence
        # over the bare language code.
        e_langs = filter(broker.dict_exists, [variety, lang])
        if e_langs:
            e_lang = e_langs[0]
        else:
            if variety is not None:
                raise PologyError(
                    _("@info",
                      "Language '%(lang)s' and variety '%(var)s' "
                      "not known to Enchant.",
                      lang=lang, var=variety))
            else:
                raise PologyError(
                    _("@info",
                      "Language '%(lang)s' not known to Enchant.",
                      lang=lang))

        # Choose the provider for the selected language.
        try:
            broker.set_ordering((e_lang or "*"), provider)
        except Exception, e:
            raise PologyError(
                _("@info",
                  "Cannot configure Enchant for provider '%(pvd)s':\n%(msg)s",
                  pvd=provider, msg=e))
def setup_sieve (p):
    """
    Define description and parameters of the Wesnoth check sieve.

    @param p: sieve parameter setup object provided by the framework
    """
    p.set_desc(_("@info sieve discription",
        "Check validity of messages in catalogs of The Battle for Wesnoth."
    ))
    # Idiomatic one-step sorted list instead of keys() + in-place sort().
    chnames = sorted(_known_checks.keys())
    p.add_param("check", unicode, seplist=True,
                metavar=_("@info sieve parameter value placeholder",
                          "KEYWORD,..."),
                desc=_("@info sieve parameter discription",
        "Run only this check instead of all (currently available: %(chklist)s). "
        "Several checks can be specified as a comma-separated list.",
        chklist=format_item_list(chnames)
    ))
    p.add_param("showmsg", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Also show the full message that had some problems."
    ))
    add_param_poeditors(p)
def add_cmdopt_colors (opars):
    """
    Add command-line options that control syntax coloring in output.

    @param opars: option parser to extend
    """
    raw_help = _("@info command line option description",
                 "Syntax coloring in output independent of destination "
                 "(whether terminal or file).")
    opars.add_option(
        "-R", "--raw-colors",
        dest="raw_colors", action="store_true", default=False,
        help=raw_help)

    defctype = "term"
    ctype_help = _("@info command line option description",
                   "Type of syntax coloring in output. "
                   "Available types: %(typelist)s; default: %(type)s.",
                   typelist=format_item_list(get_coloring_types()),
                   type=defctype)
    opars.add_option(
        "--coloring-type",
        metavar=_("@info command line value placeholder", "TYPE"),
        dest="coloring_type", action="store", default=defctype,
        help=ctype_help)
def _delimit(alts, delims): good = False for delim in delims: good = True for alt in alts: if delim in alt: good = False break if good: break if not good: fmtalts = format_item_list(["{%s}" % x for x in alts]) raise PologyError( _("@info", "No delimiter from '%(delimstr)s' can be used for " "alternatives directive containing: %(snippetlist)s.", delimstr=delims, snippetlist=fmtalts)) return delim + delim.join(alts) + delim
def fmt_par (param, indent=""):
    # Format the help entry for one sieve parameter: its name, type
    # metavariable, default and admissible values, and wrapped
    # description. Returns the formatted string.
    # NOTE: nested function — 'self' (with _ptypes, _metavars,
    # _defvals, _admvals, _descs) and helpers 'cinterp'/'fmt_wrap'
    # come from the enclosing scope.
    s = ""
    s += indent + " " + param
    ptype = self._ptypes[param]
    if ptype is bool:
        # Flag parameters take no argument.
        s += " "*1 +_("@item:intext indicator that the parameter "
                      "is a flag",
                      "[flag]")
    else:
        metavar = self._metavars[param]
        if metavar is None:
            metavar = _("@item:intext default placehodler for "
                        "the parameter argument",
                        "ARG")
        s += cinterp(":%s", metavar)
    defval = self._defvals[param]
    admvals = self._admvals[param]
    if ptype is not bool and defval is not None and str(defval):
        # Column where this line's text starts, to align a possible
        # admissible-values continuation line below.
        cpos = len(s) - s.rfind("\n") - 1
        s += " "*1 + _("@item:intext default value for the argument",
                       "[default %(arg)s=%(val)s]",
                       arg=metavar, val=defval)
        if admvals is not None:
            s += "\n" + (" " * cpos)
    if ptype is not bool and admvals is not None:
        s += " "*1 + _("@item:intext admissible argument values",
                       "[%(arg)s is one of: %(vallist)s]",
                       arg=metavar, vallist=format_item_list(admvals))
    s += "\n"
    desc = self._descs[param]
    if desc:
        fmt_desc = fmt_wrap(desc, indent + "    ")
        s += fmt_desc
        ## Wrap current parameter with empty lines if
        ## the description spanned several lines.
        #if "\n\n" in fmt_desc:
            #s = "\n" + s + "\n"
        s += "\n" # empty line after description
    return s
def _create_checker(providers, langtag, words):
    """
    Create an Enchant spell checker for the given language tag.

    When langtag is given, a dictionary is requested from the broker
    (honoring the preferred provider ordering); if that fails for any
    reason, None is returned. When langtag is None, an empty personal
    word list dictionary is created instead. Any supplied words are
    added to the checker session, with upcased variants for providers
    that are case-sensitive.

    @param providers: preferred provider ordering, or None
    @param langtag: language tag for the dictionary, or None
    @param words: extra words to accept in this session, or None
    @returns: the checker object, or None when unavailable
    """
    try:
        import enchant
    except ImportError:
        pkgs = ["python-enchant"]
        raise PologyError(
            _("@info",
              "Python wrapper for Enchant not found, "
              "please install it (possible package names: "
              "%(pkglist)s).",
              pkglist=format_item_list(pkgs)))

    if langtag is not None:
        # Best-effort: any failure to obtain a working dictionary
        # yields None. Was a bare except; narrowed so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            broker = enchant.Broker()
            if providers is not None:
                broker.set_ordering(langtag, providers)
            checker = broker.request_dict(langtag)
            checker.check(".") # force lazy initialization to fail here
        except Exception:
            checker = None
    else:
        # Empty personal word list: create and immediately discard a
        # temporary file to obtain a fresh path for the PWL dictionary.
        tmpf = tempfile.NamedTemporaryFile()
        tmpf.close()
        checker = enchant.request_pwl_dict(tmpf.name)
        os.unlink(tmpf.name)

    if checker:
        # Some providers need explicitly upcased variants of accepted
        # words, as they do not match case-insensitively.
        pname = checker.provider.name.split()[0].lower()
        need_upcasing = (pname in ("personal", "myspell"))
        for word in words or []:
            checker.add_to_session(word)
            if need_upcasing:
                checker.add_to_session(word[0].upper() + word[1:])
                checker.add_to_session(word.upper())

    return checker
def trapnakron(envec=u"", envel=u"л", envic=u"иј", envil=u"ијл",
               markup="plain", tagmap=None,
               ptsuff=None, ltsuff=None, gnsuff=None,
               stsuff=None, adsuff=None, nmsuff=None,
               npkeyto=None, nobrhyp=False, disamb="",
               runtime=False):
    """
    Main trapnakron constructor, covering all options.

    The trapnakron constructor sets, either by default or optionally,
    various transformations to enhance queries to the resulting derivator.

    Default Behavior
    ================

    Property values are returned as alternatives/hybridized compositions
    of Ekavian Cyrillic, Ekavian Latin, Ijekavian Cyrillic, and Ijekavian
    Latin forms, as applicable.
    Any of these forms can be excluded from derivation by setting
    its C{env*} parameter to C{None}.
    C{env*} parameters can also be used to change the priority environment
    from which the particular form is derived.

    Derivation and property key separator in compound keys is
    the ASCII hyphen (C{-}).

    Derivation keys are derived from syntagmas by applying
    the L{identify()<normalize.identify>} function.
    In derivations where this will result in strange keys,
    additional keys should be defined through hidden syntagmas.
    Property keys are transliterated into
    L{stripped-ASCII<lang.sr.wconv.cltoa>}.

    Conflict resolution for derivation keys is not strict
    (see L{derivator constructor<synder.Synder.__init__>}).

    Optional behavior
    =================

    Instead of plain text, properties may be reported with some markup.
    The markup type is given by C{markup} parameter, and can be one of
    C{"plain"}, C{"xml"}, C{"docbook4"}.
    The C{tagmap} parameter contains mapping of derivation keys
    to tags which should wrap properties of these derivations.

    Derivation keys can have several suffixes which effect how
    the properties are reported:
      - Presence of the suffix given by C{ptsuff} parameter signals that
        properties should be forced to plain text, if another markup is
        globally in effect.
      - Parameter C{ltsuff} states the suffix which produces lighter
        version of the markup, where applicable (e.g. people names
        in Docbook).
      - When fetching a property within a sentence (with keys given e.g.
        as XML entities), sentence construction may require that
        the resolved value is of certain gender and number; parameter
        C{gnsuff} can be used to provide a tuple of 4 suffixes for
        gender in singular and 4 suffixes for gender in plural,
        such that the property will resolve only if the value of
        gender and number matches the gender and number suffix.
      - Parameters C{stsuff} and C{adsuff} provide suffixes through
        which systematic transcription and alternative derivations
        are requested.
        They are actually tuples, where the first element is the key
        suffix, and the second element the suffix to primary environment
        which produces the systematic/alternative environment.
        C{adsuff} can also be a tuple of tuples, if several alternative
        derivations should be reachable.
      - In case the entry is a person's name with tagged first and last
        name, parameter C{nmsuff} can provide a tuple of 2 suffixes by
        which only the first or last name are requested, respectively.

    Ordinary hyphens may be converted into non-breaking hyphens
    by setting the C{nobrhyp} parameter to C{True}.
    Non-breaking hyphens are added heuristically, see
    the L{to_nobr_hyphens()<lang.sr.nobr.to_nobr_hyphens>} hook.
    Useful e.g. to avoid wrapping on hyphen-separated case endings.

    A property key normally cannot be empty, but C{npkeyto} parameter
    can be used to automatically substitute another property key
    when empty property key is seen in request for properties.
    In the simpler version, value of C{npkeyto} is just a string
    of the key to substitute for empty.
    In the more complex version, the value is a tuple containing
    the key to substitute and the list of two or more supplemental
    property keys: empty key is replaced only if all supplemental
    property values exist and are equal (see e.g. L{trapnakron_plain}
    for usage of this).

    Some property values may have been manually decorated with
    disambiguation markers (C{¤}), to differentiate them from
    property values of another derivation which would otherwise appear
    equal under a certain normalization.
    By default such markers are removed, but instead they
    can be substituted with a string given by C{disamb} parameter.

    Some derivations are defined only for purposes of obtaining
    their properties in scripted translations at runtime.
    They are by default not included, but can be by setting
    the C{runtime} parameter to C{True}.

    @param envec: primary environment for Ekavian Cyrillic derivation
    @type envec: string or C{None}
    @param envel: primary environment for Ekavian Latin derivation
    @type envel: string or C{None}
    @param envic: primary environment for Ijekavian Cyrillic derivation
    @type envic: string or C{None}
    @param envil: primary environment for Ijekavian Latin derivation
    @type envil: string or C{None}
    @param markup: target markup
    @type markup: string
    @param tagmap: tags to assign to properties by derivation keys
    @type tagmap: dict string -> string
    @param ptsuff: derivation key suffix to report plain text properties
    @type ptsuff: string
    @param ltsuff: derivation key suffix to report properties in lighter markup
    @type ltsuff: string
    @param gnsuff: suffixes by gender and number, to have no resolution
        if gender or number do not match
    @type gnsuff: [(string, string)*]
    @param stsuff: derivation key and environment name suffixes
        to report systematic transcriptions
    @type stsuff: (string, string)
    @param adsuff: derivation key and environment name suffixes
        to report alternative derivations
    @type adsuff: (string, string) or ((string, string)*)
    @param nmsuff: suffixes for fetching only first or last name of a person
    @type nmsuff: (string, string)
    @param npkeyto: property key to substitute for empty key, when given
    @type npkeyto: string or (string, [string*])
    @param nobrhyp: whether to convert some ordinary into non-breaking hyphens
    @type nobrhyp: bool
    @param disamb: string to replace each disambiguation marker with
    @type disamb: string
    @param runtime: whether to include runtime-only derivations
    @type runtime: bool

    @returns: trapnakron derivator
    @rtype: L{Synder<synder.Synder>}
    """
    # At least one environment must be given, in an allowed combination.
    env0s = [envec, envel, envic, envil]
    combo = "".join([(x is not None and "1" or "0") for x in env0s])
    if combo not in _good_eicl_combos:
        raise PologyError(
            _("@info",
              "Invalid combination of Ekavian/Ijekavian Cyrillic/Latin "
              "environments to trapnakron derivator."))

    if markup not in _known_markups:
        raise PologyError(
            _("@info",
              "Unknown markup type '%(mtype)s' to trapnakron derivator "
              "(known markups: %(mtypelist)s).",
              mtype=markup, mtypelist=format_item_list(_known_markups)))

    # Compose environment fallback chains.
    env = []
    envprops = [] # [(islatin, isije)*]
    vd = lambda e, d: e if e is not None else d
    if envec is not None:
        env.append((envec,))
        envprops.append((False, False))
    if envel is not None:
        env.append((envel, vd(envec, u"")))
        envprops.append((True, False))
    if envic is not None:
        env.append((envic, vd(envec, u"")))
        envprops.append((False, True))
    if envil is not None:
        env.append((envil, vd(envel, u"л"), vd(envic, u"иј"), vd(envec, u"")))
        envprops.append((True, True))

    # Setup up requests by derivation key suffix.
    mvends = {}
    if ptsuff:
        mvends[ptsuff] = _suff_pltext_id
    if ltsuff:
        mvends[ltsuff] = _suff_ltmarkup_id
    if gnsuff:
        if len(gnsuff) != 8:
            raise PologyError(
                _("@info",
                  "Sequence of gender-number suffixes must have "
                  "exactly 8 elements."))
        mvends.update(zip(gnsuff, _gnmatch_suff_ids))
    aenvs = {}
    if adsuff or stsuff:
        kesuffs = [] # must have same order as _aenv_suff_ids
        if stsuff is not None:
            kesuffs.append(stsuff)
        if not isinstance(adsuff[0], tuple):
            kesuffs.append(adsuff)
        else:
            kesuffs.extend(adsuff)
        for (ksuff, esuff), suff_id in zip(kesuffs, _aenv_suff_ids):
            mvends[ksuff] = suff_id
            # Compose environment fallback chain for this suffix:
            # for each base chain, environments with the suffix come
            # before the plain ones.
            aenv = []
            for env1 in env:
                aenv1 = []
                for esuff1 in (esuff, ""):
                    for env0 in env1:
                        aenv1.append(env0 + esuff1)
                aenv.append(tuple(aenv1))
            aenvs[suff_id] = tuple(aenv)
    if nmsuff:
        if len(nmsuff) != 2:
            raise PologyError(
                _("@info",
                  "Sequence of person name suffixes must have "
                  "exactly 2 elements."))
        mvends.update(zip(nmsuff, _pname_suff_ids))

    # Setup substitution of empty property keys.
    expkeys = []
    if isinstance(npkeyto, tuple):
        npkeyto, expkeys = npkeyto

    # Create transformators.
    dkeytf = _sd_dkey_transf(mvends, tagmap)
    pkeytf = _sd_pkey_transf(npkeyto, expkeys)
    pvaltf = _sd_pval_transf(envprops, markup, nobrhyp, disamb)
    ksyntf = _sd_ksyn_transf(markup, False, disamb)
    envtf = _sd_env_transf(aenvs)

    # Build the derivator.
    sd = Synder(env=env,
                ckeysep="-",
                dkeytf=dkeytf, dkeyitf=identify,
                pkeytf=pkeytf, pkeyitf=norm_pkey,
                pvaltf=pvaltf, ksyntf=ksyntf,
                envtf=envtf,
                strictkey=False)

    # Collect synder files composing the trapnakron.
    sdfiles = _get_trapnakron_files(runtime)

    # Import into derivator.
    for sdfile in sdfiles:
        sd.import_file(sdfile)

    return sd
def setup_sieve (p):
    # Define description and parameters of the find-messages sieve.
    p.set_desc(_("@info sieve discription",
        "Find messages in catalogs."
        "\n\n"
        "Each message is matched according to one or several criteria, "
        "and if it matches as whole, it is displayed to standard output, "
        "along with the catalog path and referent line and entry number."
        "\n\n"
        "When several matching parameters are given, by default a message "
        "is matched if all of them match (AND-relation). "
        "This can be changed to OR-relation for matching in text fields "
        "(%(fieldlist)s) using the '%(par)s' parameter. "
        "Any matching parameter can be repeated when it makes sense "
        "(e.g. two matches on msgid).",
        fieldlist=format_item_list(["msgctxt", "msgid", "msgstr", "comment"]),
        par="or"
    ))

    # NOTE: Do not add default values for matchers,
    # we need None to see if they were issued or not.

    # Text-field matchers (each with a negated n* counterpart).
    p.add_param("msgid", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if the '%(field)s' field matches the regular expression.",
        field="msgid"
    ))
    p.add_param("nmsgid", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if the '%(field)s' field does not match the regular expression.",
        field="msgid"
    ))
    p.add_param("msgstr", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if the '%(field)s' field matches the regular expression.",
        field="msgstr"
    ))
    p.add_param("nmsgstr", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if the '%(field)s' field does not match the regular expression.",
        field="msgstr"
    ))
    p.add_param("msgctxt", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if the '%(field)s' field matches the regular expression.",
        field="msgctxt"
    ))
    p.add_param("nmsgctxt", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if the '%(field)s' field does not match the regular expression.",
        field="msgctxt"
    ))
    p.add_param("comment", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if a comment line (extracted or translator) "
        "matches the regular expression."
    ))
    p.add_param("ncomment", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if a comment line (extracted or translator) "
        "does not match the regular expression."
    ))

    # Message-state matchers.
    p.add_param("transl", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is translated."
    ))
    p.add_param("ntransl", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is not translated."
    ))
    p.add_param("obsol", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is obsolete."
    ))
    p.add_param("nobsol", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is not obsolete."
    ))
    p.add_param("active", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is active (translated and not obsolete)."
    ))
    p.add_param("nactive", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is not active (not translated or obsolete)."
    ))
    p.add_param("flag", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if one of the flags matches the regular expression."
    ))
    p.add_param("nflag", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "REGEX"),
                desc=_("@info sieve parameter discription",
        "Matches if none of the flags matches the regular expression."
    ))
    p.add_param("plural", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is plural."
    ))
    p.add_param("nplural", bool,
                desc=_("@info sieve parameter discription",
        "Matches if the message is not plural."
    ))

    # Length and position matchers.
    p.add_param("maxchar", int,
                metavar=_("@info sieve parameter value placeholder", "NUM"),
                desc=_("@info sieve parameter discription",
        "Matches if both the '%(field1)s' and '%(field2)s' field "
        "have at most this many characters "
        "(0 or less means any number of characters).",
        field1="msgid", field2="msgstr"
    ))
    p.add_param("nmaxchar", int,
                metavar=_("@info sieve parameter value placeholder", "NUM"),
                desc=_("@info sieve parameter discription",
        "Matches if either the '%(field1)s' or '%(field2)s' field "
        "have more than this many characters "
        "(0 or less means any number of characters).",
        field1="msgid", field2="msgstr"
    ))
    p.add_param("lspan", unicode,
                metavar=_("@info sieve parameter value placeholder", "START:END"),
                desc=_("@info sieve parameter discription",
        "Matches if the message line number is in the given range "
        "(including starting line, excluding ending line)."
    ))
    p.add_param("nlspan", unicode,
                metavar=_("@info sieve parameter value placeholder", "START:END"),
                desc=_("@info sieve parameter discription",
        "Matches if the message line number is not in the given range "
        "(including starting line, excluding ending line)."
    ))
    p.add_param("espan", unicode,
                metavar=_("@info sieve parameter value placeholder", "START:END"),
                desc=_("@info sieve parameter discription",
        "Matches if the message entry number is in the given range "
        "(including starting entry, excluding ending entry)."
    ))
    p.add_param("nespan", unicode,
                metavar=_("@info sieve parameter value placeholder", "START:END"),
                desc=_("@info sieve parameter discription",
        "Matches if the message entry number is not in the given range "
        "(including starting entry, excluding ending entry)."
    ))

    # Summit branch matchers.
    p.add_param("branch", unicode, seplist=True,
                metavar=_("@info sieve parameter value placeholder", "BRANCH"),
                desc=_("@info sieve parameter discription",
        "In summit catalogs, match only messages belonging to given branch. "
        "Several branches can be given as comma-separated list."
    ))
    p.add_param("nbranch", unicode, seplist=True,
                metavar=_("@info sieve parameter value placeholder", "BRANCH"),
                desc=_("@info sieve parameter discription",
        "Match only messages not belonging to given branch."
    ))

    # Free-form logical expression matchers.
    p.add_param("fexpr", unicode,
                metavar=_("@info sieve parameter value placeholder",
                          "EXPRESSION"),
                desc=_("@info sieve parameter discription",
        "Matches if the logical expression matches. "
        "The expression is composed of direct matchers (not starting with n*), "
        "explicitly linked with AND, OR, and NOT operators, and parenthesis. "
        "Base matchers taking parameters are given as MATCHER/VALUE/, "
        "where slash can be replaced consistently with any other character. "
        "Global matching modifiers can be overriden using MATCHER/VALUE/MODS, or "
        "MATCHER/MODS for parameterless matchers "
        "(currently available: c/i for case-sensitive/insensitive). "
        "Examples:"
        "\n\n"
        "fexpr:'(msgctxt/foo/ or comment/foo/) and msgid/bar/'"
        "\n\n"
        "fexpr:'msgid/quuk/ and msgstr/Qaak/c'"
    ))
    p.add_param("nfexpr", unicode,
                metavar=_("@info sieve parameter value placeholder",
                          "EXPRESSION"),
                desc=_("@info sieve parameter discription",
        "Matches if the logical expression does not match."
    ))

    # Global matching modifiers and actions.
    p.add_param("or", bool, defval=False, attrname="or_match",
                desc=_("@info sieve parameter discription",
        "Use OR-relation for matching text fields: if any of "
        "the patterns matches, the message is matched as whole."
    ))
    p.add_param("invert", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Invert the condition: report messages which do not match."
    ))
    p.add_param("case", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Case-sensitive text matching."
    ))
    p.add_param("accel", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "CHAR"),
                desc=_("@info sieve parameter discription",
        "Character which is used as UI accelerator marker in text fields, "
        "to ignore it on matching. "
        "If a catalog defines accelerator marker in the header, "
        "this value overrides it."
    ))
    p.add_param("mark", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Add '%(flag)s' flag to each matched message.",
        flag=_flag_mark
    ))
    p.add_param("filter", unicode, multival=True,
                metavar=_("@info sieve parameter value placeholder", "HOOK"),
                desc=_("@info sieve parameter discription",
        "F1A hook specification, to filter the msgstr fields through "
        "before matching them. "
        "Several hooks can be specified by repeating the parameter."
    ))
    p.add_param("replace", unicode,
                metavar=_("@info sieve parameter value placeholder",
                          "REPLSTR"),
                desc=_("@info sieve parameter discription",
        "Replace all substrings matched by msgstr pattern with REPLSTR. "
        "It can include back-references to matched groups (\\1, \\2, etc.)"
    ))
    p.add_param("nomsg", bool, defval=False,
                desc=_("@info sieve parameter discription",
        "Do not report message to standard output "
        "(when only the number of matches is wanted)."
    ))
    add_param_poeditors(p)
def _main():
    # Command-line entry point: parse options, resolve the VCS and
    # PO paths, then hybridize each modified catalog against its
    # version-controlled base revision.
    locale.setlocale(locale.LC_ALL, "")

    usage = _("@info command usage",
        "%(cmd)s [OPTIONS] VCS [POPATHS...]",
        cmd="%prog")
    desc = _("@info command description",
        "Compose hybridized Ijekavian-Ekavian translation out of "
        "translation modified from Ekavian to Ijekavian or vice-versa.")
    ver = _("@info command version",
        u"%(cmd)s (Pology) %(version)s\n"
        u"Copyright © 2009, 2010 "
        u"Chusslove Illich (Часлав Илић) <%(email)s>",
        cmd="%prog", version=version(), email="*****@*****.**")

    opars = ColorOptionParser(usage=usage, description=desc, version=ver)
    opars.add_option(
        "-a", "--accept-changes",
        action="store_true", dest="accept_changes", default=False,
        help=_("@info command line option description",
               "Accept messages which have some changes between base "
               "and reconstructed base text."))
    opars.add_option(
        "-r", "--base-revision",
        metavar=_("@info command line value placeholder", "REVISION"),
        action="store", dest="base_revision", default=None,
        help=_("@info command line option description",
               "Use the given revision as base for hybridization, "
               "instead of local latest revision."))
    add_cmdopt_filesfrom(opars)

    (options, free_args) = opars.parse_args(str_to_unicode(sys.argv[1:]))

    # Optional speedup through the Psyco JIT (Python 2), best-effort.
    try:
        import psyco
        psyco.full()
    except ImportError:
        pass

    # Create VCS.
    if len(free_args) < 1:
        showvcs = list(set(available_vcs()).difference(["none"]))
        showvcs.sort()
        error(_("@info",
                "Version control system not given "
                "(can be one of: %(vcslist)s).",
                vcslist=format_item_list(showvcs)))
    vcskey = free_args.pop(0)
    if vcskey not in available_vcs(flat=True):
        error(_("@info",
                "Unknown version control system '%(vcs)s'.",
                vcs=vcskey))
    vcs = make_vcs(vcskey)

    # Collect PO files in given paths.
    popaths = collect_paths_cmdline(rawpaths=free_args,
                                    filesfrom=options.files_from,
                                    elsecwd=True,
                                    respathf=collect_catalogs,
                                    abort=True)

    # Catalogs must be under version control.
    for path in popaths:
        if not vcs.is_versioned(path):
            error(_("@info",
                    "Catalog '%(file)s' is not under version control.",
                    file=path))

    # Go by modified PO file and hybridize it.
    for path in popaths:
        # Extract local head counterpart.
        tmpf = NamedTemporaryFile(prefix="pohybdl-export-", suffix=".po")
        if not vcs.export(path, options.base_revision, tmpf.name):
            error(_("@info",
                    "Version control system cannot export file '%(file)s'.",
                    file=path))
        # Hybridize by comparing local head and modified file.
        hybdl(path, tmpf.name, options.accept_changes)
def _loadRules (self, lang, envs):
    """
    Load validation rules for the given language and environments,
    then narrow them by any explicit selection/exclusion requests.

    Selection is by exact identifier (C{self.ruleChoice}) or by regex
    (C{self.ruleChoiceRx}); exclusion likewise via the C{...Inv}
    counterparts.  Requesting an identifier that no loaded rule carries
    raises L{SieveError}.  When C{self.ruleinfo} is set, progress
    summaries are reported.

    @returns: the retained rules and the set of their distinct
        message filters (C{rule.mfilter} values of non-disabled rules)
    @rtype: (list, set)
    """
    # Load rules.
    rules = loadRules(lang, envs,
                      self.envOnly, self.customRuleFiles, self.stat,
                      self.ruleinfo)

    # Perhaps retain only those rules explicitly requested
    # in the command line, by their identifiers.
    selectedRules = set()
    srules = set()
    if self.ruleChoice:
        requestedRules = set([x.strip() for x in self.ruleChoice])
        foundRules = set()
        for rule in rules:
            if rule.ident in requestedRules:
                srules.add(rule)
                foundRules.add(rule.ident)
                # Explicit selection overrides a rule's disabled state.
                rule.disabled = False
        if foundRules != requestedRules:
            missingRules = list(requestedRules - foundRules)
            fmtMissingRules = format_item_list(sorted(missingRules))
            raise SieveError(_("@info",
                               "Some explicitly selected rules "
                               "are missing: %(rulelist)s.",
                               rulelist=fmtMissingRules))
        selectedRules.update(foundRules)
    if self.ruleChoiceRx:
        identRxs = [re.compile(x, re.U) for x in self.ruleChoiceRx]
        for rule in rules:
            # Keep the rule if any of the regexes matches its identifier.
            if (rule.ident
                and reduce(lambda s, x: s or x.search(rule.ident),
                           identRxs, False)
            ):
                srules.add(rule)
                selectedRules.add(rule.ident)
    if self.ruleChoice or self.ruleChoiceRx:
        rules = list(srules)

    # Now exclude rules explicitly requested for exclusion,
    # by identifier or by regex, from whatever was retained above.
    selectedRulesInv = set()
    srules = set(rules)
    if self.ruleChoiceInv:
        requestedRules = set([x.strip() for x in self.ruleChoiceInv])
        foundRules = set()
        for rule in rules:
            if rule.ident in requestedRules:
                if rule in srules:
                    srules.remove(rule)
                foundRules.add(rule.ident)
        if foundRules != requestedRules:
            missingRules = list(requestedRules - foundRules)
            fmtMissingRules = format_item_list(sorted(missingRules))
            raise SieveError(_("@info",
                               "Some explicitly excluded rules "
                               "are missing: %(rulelist)s.",
                               rulelist=fmtMissingRules))
        selectedRulesInv.update(foundRules)
    if self.ruleChoiceInvRx:
        identRxs = [re.compile(x, re.U) for x in self.ruleChoiceInvRx]
        for rule in rules:
            if (rule.ident
                and reduce(lambda s, x: s or x.search(rule.ident),
                           identRxs, False)
            ):
                if rule in srules:
                    srules.remove(rule)
                selectedRulesInv.add(rule.ident)
    if self.ruleChoiceInv or self.ruleChoiceInvRx:
        rules = list(srules)

    if self.ruleinfo:
        ntot = len(rules)
        ndis = len([x for x in rules if x.disabled])
        nact = ntot - ndis
        # The sub-messages below are assembled into one report line
        # via the %(tot)s/%(env)s/%(act)s/%(dis)s placeholders.
        totfmt = n_("@item:intext inserted below as %(tot)s",
                    "Loaded %(num)d rule", "Loaded %(num)d rules",
                    num=ntot)
        if self.envOnly:
            envfmt = _("@item:intext inserted below as %(env)s",
                       "[only: %(envlist)s]",
                       envlist=format_item_list(envs))
        else:
            envfmt = _("@item:intext inserted below as %(env)s",
                       "[%(envlist)s]",
                       envlist=format_item_list(envs))
        actfmt = n_("@item:intext inserted below as %(act)s",
                    "%(num)d active", "%(num)d active",
                    num=nact)
        disfmt = n_("@item:intext inserted below as %(dis)s",
                    "%(num)d disabled", "%(num)d disabled",
                    num=ndis)
        subs = dict(tot=totfmt, env=envfmt, act=actfmt, dis=disfmt)
        if ndis and envs:
            report(_("@info:progress insertions from above",
                     "%(tot)s %(env)s (%(act)s, %(dis)s).", **subs))
        elif ndis:
            report(_("@info:progress insertions from above",
                     "%(tot)s (%(act)s, %(dis)s).", **subs))
        elif envs:
            report(_("@info:progress insertions from above",
                     "%(tot)s %(env)s.", **subs))
        else:
            report(_("@info:progress insertions from above",
                     "%(tot)s.", **subs))
        if selectedRules:
            # Report which rules were selected (names if few, count if many).
            selectedRules = selectedRules.difference(selectedRulesInv)
            n = len(selectedRules)
            if n <= 10:
                rlst = list(selectedRules)
                report(_("@info:progress",
                         "Selected rules: %(rulelist)s.",
                         rulelist=format_item_list(sorted(rlst))))
            else:
                report(n_("@info:progress",
                          "Selected %(num)d rule.",
                          "Selected %(num)d rules.",
                          num=n))
        elif selectedRulesInv:
            n = len(selectedRulesInv)
            if n <= 10:
                rlst = list(selectedRulesInv)
                report(_("@info:progress",
                         "Excluded rules: %(rulelist)s.",
                         rulelist=format_item_list(sorted(rlst))))
            else:
                report(n_("@info:progress",
                          "Excluded %(num)d rule.",
                          "Excluded %(num)d rules.",
                          num=n))

    # Collect all distinct filters from rules.
    ruleFilters = set()
    for rule in rules:
        if not rule.disabled:
            ruleFilters.add(rule.mfilter)
    if self.ruleinfo:
        nflt = len([x for x in ruleFilters if x is not None])
        if nflt:
            report(n_("@info:progress",
                      "Active rules define %(num)d distinct filter set.",
                      "Active rules define %(num)d distinct filter sets.",
                      num=nflt))

    return rules, ruleFilters
if expwkeys: fmtdkeys = ", ".join(sorted(tp.altdkeys(dkey))) fmtexp = "# " + fmtdkeys + "\n" + fmtexp if fmtexp not in reported_fmtexps: if not esuff: report(fmtexp) reported_fmtexps.add(fmtexp) else: afmtexp = " @" + esuff + ": " + ", ".join(fmtprops) report(afmtexp) nproblems += cnproblems tp.empty_pcache() if unmatched_srcs: fmtsrcs = format_item_list(sorted(getattr(x, "pattern", x) for x in unmatched_srcs)) warning(_("@info", "Sources requested by name not found: %(srclist)s.", srclist=fmtsrcs)) if unmatched_keys: fmtkeys = format_item_list(sorted(getattr(x, "pattern", x) for x in unmatched_keys)) warning(_("@info", "Derivations requested by key not found: %(keylist)s.", keylist=fmtkeys)) return nproblems class _Wre (object):
def process(self, msg, cat):
    """
    Spell-check all translated forms of a message through Aspell.

    Skips untranslated messages, messages whose context or auto comments
    match an entry of C{self.ignoredContext}, and messages flagged with
    C{flag_no_check_spell}.  Unknown words are either accumulated in
    C{self.unknownWords} or reported (optionally into an XML file or
    to Lokalize).
    """
    if not msg.translated:
        return

    id = 0  # Count msgstr plural forms (note: shadows builtin id())
    failedSuggs = []  # pairs of wrong words and suggestions
    for msgstr in msg.msgstr:
        # Skip message with context in the ignoredContext list
        skip = False
        for context in self.ignoredContext:
            if context in (msg.msgctxt or u"").lower():
                skip = True
                break
            for comment in msg.auto_comment:
                if context in comment.lower():
                    skip = True
                    break
            if skip:
                break
        if skip:
            # Whole message is skipped, not just this plural form.
            break

        # Skip message if explicitly requested.
        if flag_no_check_spell in manc_parse_flag_list(msg, "|"):
            continue

        # Apply precheck filters.
        for pfilter, pfname in self.pfilters:
            try:  # try as type F1A hook
                msgstr = pfilter(msgstr)
            except TypeError:
                try:  # try as type F3* hook
                    msgstr = pfilter(msgstr, msg, cat)
                except TypeError:
                    raise SieveError(
                        _("@info",
                          "Cannot execute filter '%(filt)s'.",
                          filt=pfname))

        # Split text into words.
        if not self.simsp:
            words = proper_words(msgstr, True, cat.accelerator(), msg.format)
        else:
            # NOTE: Temporary, remove when proper_words becomes smarter.
            words = msgstr.split()

        # Eliminate from checking words matching the skip regex.
        if self.skipRx:
            words = [x for x in words if not self.skipRx.search(x)]

        # Eliminate from checking words explicitly listed as good.
        locally_ignored = manc_parse_list(msg, elist_well_spelled, ",")
        words = [x for x in words if x not in locally_ignored]

        for word in words:
            # Encode word for Aspell.
            encodedWord = word.encode(self.encoding)
            spell = self.aspell.check(encodedWord)
            if spell is False:
                # NOTE(review): word.encode() above sits outside this
                # handler; the except presumably guards encoding issues
                # in the suggestion/reporting path — confirm.
                try:
                    self.nmatch += 1
                    if self.unknownWords is not None:
                        # Accumulation mode: collect words, report later.
                        self.unknownWords.add(word)
                    else:
                        encodedSuggestions = self.aspell.suggest(encodedWord)
                        suggestions = [i.decode(self.encoding)
                                       for i in encodedSuggestions]
                        failedSuggs.append((word, suggestions))
                        if self.xmlFile:
                            xmlError = spell_xml_error(msg, cat, word,
                                                       suggestions, id)
                            self.xmlFile.writelines(xmlError)
                        else:
                            spell_error(msg, cat, word, suggestions)
                except UnicodeEncodeError:
                    warning(_("@info",
                              "Cannot encode word '%(word)s' in "
                              "selected encoding '%(enc)s'.",
                              word=word, enc=self.encoding))
        id += 1  # Increase msgstr id count

    if failedSuggs and self.lokalize:
        # Push a summary of all misspellings to Lokalize.
        repls = [_("@label", "Spelling errors:")]
        for word, suggs in failedSuggs:
            if suggs:
                fmtsuggs = format_item_list(suggs)
                repls.append(_("@item",
                               "%(word)s (suggestions: %(wordlist)s)",
                               word=word, wordlist=fmtsuggs))
            else:
                repls.append("%s" % (word))
        report_msg_to_lokalize(msg, cat, cjoin(repls, "\n"))
def parse (self, rawpars, subcmds):
    """
    Parse the list of parameters collected from the command line.

    If the command line had parameters specified as::

        -sfoo -sbar:xyz -sbaz:10

    then the function call should get the list::

        rawpars=['foo', 'bar:xyz', 'baz:10']

    Result of parsing will be a dictionary of objects by subcommand name,
    where each object has attributes named like subcommand parameters.
    If attribute name has not been explicitly defined for a parameter,
    its parameter name will be used; if not a valid identifier by itself,
    it will be normalized by replacing all troublesome characters with
    an underscore, collapsing contiguous underscore sequences to a single
    underscore, and prepending an 'x' if it does not start with a letter.

    If a parameter is parsed which is not accepted by any of the given
    subcommands, its name is added to list of non-accepted parameters,
    which is the second element of the return tuple.

    @param rawpars: raw parameters
    @type rawpars: list of strings
    @param subcmds: names of issued subcommands
    @type subcmds: list of strings

    @return: objects with parameters as attributes, and
        list of parameter names not accepted by any of subcommands
    @rtype: dict of objects by subcommand name and list of strings
    """
    # Assure only registered subcommands have been issued.
    for subcmd in subcmds:
        if subcmd not in self._scviews:
            raise SubcmdError(
                _("@info",
                  "Unregistered subcommand '%(cmd)s' issued.",
                  cmd=subcmd))

    # Parse all given parameters and collect their values.
    param_vals = dict([(x, {}) for x in subcmds])
    nacc_params = []
    for opstr in rawpars:
        # Split "name:value"; a bare "name" leaves strval as None (a flag).
        lst = opstr.split(":", 1)
        lst += [None] * (2 - len(lst))
        param, strval = lst
        param_accepted = False
        for subcmd in subcmds:
            scview = self._scviews[subcmd]
            if param not in scview._ptypes:
                # Current subcommand does not have this parameter, skip.
                continue

            if param in param_vals[subcmd] and not scview._multivals[param]:
                raise SubcmdError(
                    _("@info",
                      "Parameter '%(par)s' repeated more than once.",
                      par=param))

            ptype = scview._ptypes[param]
            if ptype is bool and strval is not None:
                raise SubcmdError(
                    _("@info",
                      "Parameter '%(par)s' is a flag, no value expected.",
                      par=param))
            if ptype is not bool and strval is None:
                raise SubcmdError(
                    _("@info",
                      "Value expected for parameter '%(par)s'.",
                      par=param))

            val = scview._defvals[param]
            if ptype is bool:
                # A flag toggles away from its default value.
                val = not val

            val_lst = []
            if strval is not None:
                if not scview._seplists[param]:
                    try:
                        val = ptype(strval)
                    except:
                        raise SubcmdError(
                            _("@info",
                              "Cannot convert value '%(val)s' to "
                              "parameter '%(par)s' into expected "
                              "type '%(type)s'.",
                              val=strval, par=param, type=ptype))
                    val_lst = [val]
                else:
                    # Comma-separated list: convert each element.
                    tmplst = strval.split(",")
                    try:
                        val = [ptype(x) for x in tmplst]
                    except:
                        raise SubcmdError(
                            _("@info",
                              "Cannot convert value '%(val)s' to "
                              "parameter '%(par)s' into list of "
                              "elements of expected type '%(type)s'.",
                              val=strval, par=param, type=ptype))
                    val_lst = val

            # Assure admissibility of parameter values.
            admvals = scview._admvals[param]
            if admvals is not None:
                for val in val_lst:
                    if val not in admvals:
                        raise SubcmdError(
                            _("@info",
                              "Value '%(val)s' to parameter '%(par)s' "
                              "not in the admissible set: %(vallist)s.",
                              val=strval, par=param,
                              vallist=format_item_list(admvals)))

            param_accepted = True
            if scview._multivals[param] or scview._seplists[param]:
                # List-valued parameters accumulate across repetitions.
                if param not in param_vals[subcmd]:
                    param_vals[subcmd][param] = []
                param_vals[subcmd][param].extend(val_lst)
            else:
                param_vals[subcmd][param] = val

        if not param_accepted and param not in nacc_params:
            nacc_params.append(param)

    # Assure that all mandatory parameters have been supplied to each
    # issued subcommand, and set defaults for all optional parameters.
    for subcmd in subcmds:
        scview = self._scviews[subcmd]
        for param in scview._ptypes:
            if param in param_vals[subcmd]:
                # Option explicitly given, skip.
                continue

            if scview._mandatorys[param]:
                raise SubcmdError(
                    _("@info",
                      "Mandatory parameter '%(par)s' to subcommand "
                      "'%(cmd)s' not issued.",
                      par=param, cmd=subcmd))

            param_vals[subcmd][param] = scview._defvals[param]

    # Create dictionary of parameter objects.
    class ParamsTemp (object): pass
    params = {}
    for subcmd in subcmds:
        scview = self._scviews[subcmd]
        params[subcmd] = ParamsTemp()
        for param, val in param_vals[subcmd].iteritems():
            # Construct valid attribute name out of parameter name.
            to_attr_rx = re.compile(r"[^a-z0-9]+", re.I|re.U)
            attr = scview._attrnames[param]
            if not attr:
                attr = to_attr_rx.sub("_", param)
                if not attr[:1].isalpha():
                    attr = "x" + attr
            params[subcmd].__dict__[attr] = val

    return params, nacc_params
def main():
    """
    Command-line entry point for posieve: parse options, load and set up
    the requested sieves, then run them over the collected PO catalogs.

    (Sieving of messages and final sync/reporting continue below.)
    """
    locale.setlocale(locale.LC_ALL, "")

    # Get defaults for command line options from global config.
    cfgsec = pology_config.section("posieve")
    def_do_skip = cfgsec.boolean("skip-on-error", True)
    def_msgfmt_check = cfgsec.boolean("msgfmt-check", False)
    def_skip_obsolete = cfgsec.boolean("skip-obsolete", False)

    # Setup options and parse the command line.
    usage = _("@info command usage",
        "%(cmd)s [OPTIONS] SIEVE [POPATHS...]",
        cmd="%prog")
    desc = _("@info command description",
        "Apply sieves to PO paths, which may be either single PO files or "
        "directories to search recursively for PO files. "
        "Some of the sieves only examine PO files, while others "
        "modify them as well. "
        "The first non-option argument is the sieve name; "
        "a list of several comma-separated sieves can be given too.")
    ver = _("@info command version",
        u"%(cmd)s (Pology) %(version)s\n"
        u"Copyright © 2007, 2008, 2009, 2010 "
        u"Chusslove Illich (Часлав Илић) <%(email)s>",
        cmd="%prog", version=version(), email="*****@*****.**")

    opars = ColorOptionParser(usage=usage, description=desc, version=ver)
    opars.add_option(
        "-a", "--announce-entry",
        action="store_true", dest="announce_entry", default=False,
        help=_("@info command line option description",
               "Announce that header or message is just about to be sieved."))
    opars.add_option(
        "-b", "--skip-obsolete",
        action="store_true", dest="skip_obsolete", default=def_skip_obsolete,
        help=_("@info command line option description",
               "Do not sieve obsolete messages."))
    opars.add_option(
        "-c", "--msgfmt-check",
        action="store_true", dest="msgfmt_check", default=def_msgfmt_check,
        help=_("@info command line option description",
               "Check catalogs by %(cmd)s and skip those which do not pass.",
               cmd="msgfmt -c"))
    opars.add_option(
        "-u", "--single-entry",
        metavar=_("@info command line value placeholder", "ENTRY_NUMBER"),
        action="store", dest="single_entry", default=0,
        help=_("@info command line option description",
               "Only perform the check on this ENTRY_NUMBER."))
    opars.add_option(
        "--force-sync",
        action="store_true", dest="force_sync", default=False,
        help=_("@info command line option description",
               "Force rewriting of all messages, whether modified or not."))
    opars.add_option(
        "-H", "--help-sieves",
        action="store_true", dest="help_sieves", default=False,
        help=_("@info command line option description",
               "Show help for applied sieves."))
    opars.add_option(
        "--issued-params",
        action="store_true", dest="issued_params", default=False,
        help=_("@info command line option description",
               "Show all issued sieve parameters "
               "(from command line and user configuration)."))
    opars.add_option(
        "-l", "--list-sieves",
        action="store_true", dest="list_sieves", default=False,
        help=_("@info command line option description",
               "List available internal sieves."))
    opars.add_option(
        "--list-options",
        action="store_true", dest="list_options", default=False,
        help=_("@info command line option description",
               "List the names of available options."))
    opars.add_option(
        "--list-sieve-names",
        action="store_true", dest="list_sieve_names", default=False,
        help=_("@info command line option description",
               "List the names of available internal sieves."))
    opars.add_option(
        "--list-sieve-params",
        action="store_true", dest="list_sieve_params", default=False,
        help=_("@info command line option description",
               "List the parameters known to issued sieves."))
    opars.add_option(
        "-m", "--output-modified",
        metavar=_("@info command line value placeholder", "FILE"),
        action="store", dest="output_modified", default=None,
        help=_("@info command line option description",
               "Output names of modified files into FILE."))
    opars.add_option(
        "--no-skip",
        action="store_false", dest="do_skip", default=def_do_skip,
        help=_("@info command line option description",
               "Do not try to skip catalogs which signal errors."))
    opars.add_option(
        "--no-sync",
        action="store_false", dest="do_sync", default=True,
        help=_("@info command line option description",
               "Do not write any modifications to catalogs."))
    opars.add_option(
        "-q", "--quiet",
        action="store_true", dest="quiet", default=False,
        help=_("@info command line option description",
               "Do not display any progress info "
               "(does not influence sieves themselves)."))
    opars.add_option(
        "-s",
        metavar=_("@info command line value placeholder", "NAME[:VALUE]"),
        action="append", dest="sieve_params", default=[],
        help=_("@info command line option description",
               "Pass a parameter to sieves."))
    opars.add_option(
        "-S",
        metavar=_("@info command line value placeholder", "NAME[:VALUE]"),
        action="append", dest="sieve_no_params", default=[],
        help=_("@info command line option description",
               "Remove a parameter to sieves "
               "(e.g. if it was issued through user configuration)."))
    opars.add_option(
        "-v", "--verbose",
        action="store_true", dest="verbose", default=False,
        help=_("@info command line option description",
               "Output more detailed progress information."))
    add_cmdopt_filesfrom(opars)
    add_cmdopt_incexc(opars)
    add_cmdopt_colors(opars)

    (op, free_args) = opars.parse_args(str_to_unicode(sys.argv[1:]))

    if op.list_options:
        report(list_options(opars))
        sys.exit(0)

    if len(free_args) < 1 and not (op.list_sieves or op.list_sieve_names):
        error(_("@info", "No sieve to apply given."))

    op.raw_sieves = []
    op.raw_paths = []
    if len(free_args) > 2 and op.single_entry != 0:
        error(_("@info",
                "With single entry mode, you can only give one input file."))

    if len(free_args) >= 1:
        op.raw_sieves = free_args[0]
        op.raw_paths = free_args[1:]

    # Could use some speedup.
    try:
        import psyco
        psyco.full()
    except ImportError:
        pass

    set_coloring_globals(ctype=op.coloring_type, outdep=(not op.raw_colors))

    # Dummy-set all internal sieves as requested if sieve listing required.
    sieves_requested = []
    if op.list_sieves or op.list_sieve_names:
        # Global sieves.
        modpaths = glob.glob(os.path.join(datadir(), "sieve", "[a-z]*.py"))
        modpaths.sort()
        for modpath in modpaths:
            sname = os.path.basename(modpath)[:-3]  # minus .py
            sname = sname.replace("_", "-")
            sieves_requested.append(sname)
        # Language-specific sieves.
        modpaths = glob.glob(os.path.join(datadir(),
                                          "lang", "*", "sieve", "[a-z]*.py"))
        modpaths.sort()
        for modpath in modpaths:
            sname = os.path.basename(modpath)[:-3]  # minus .py
            sname = sname.replace("_", "-")
            lang = os.path.basename(os.path.dirname(os.path.dirname(modpath)))
            sieves_requested.append(lang + ":" + sname)

    # No need to load and setup sieves if only listing sieve names requested.
    if op.list_sieve_names:
        report("\n".join(sieves_requested))
        sys.exit(0)

    # Load sieve modules from supplied names in the command line.
    if not sieves_requested:
        sieves_requested = op.raw_sieves.split(",")

    sieve_modules = []
    for sieve_name in sieves_requested:
        # Resolve sieve file.
        if not sieve_name.endswith(".py"):
            # One of internal sieves.
            if ":" in sieve_name:
                # Language-specific internal sieve.
                lang, name = sieve_name.split(":")
                sieve_path_base = os.path.join("lang", lang, "sieve", name)
            else:
                sieve_path_base = os.path.join("sieve", sieve_name)
            sieve_path_base = sieve_path_base.replace("-", "_") + ".py"
            sieve_path = os.path.join(datadir(), sieve_path_base)
        else:
            # Sieve name is its path.
            sieve_path = sieve_name
        try:
            sieve_file = open(unicode_to_str(sieve_path))
            # ...unicode_to_str because of exec below.
        except IOError:
            error(_("@info", "Cannot load sieve '%(file)s'.",
                    file=sieve_path))
        # Load file into new module.
        sieve_mod_name = "sieve_" + str(len(sieve_modules))
        sieve_mod = imp.new_module(sieve_mod_name)
        exec sieve_file in sieve_mod.__dict__
        sieve_file.close()
        sys.modules[sieve_mod_name] = sieve_mod  # to avoid garbage collection
        sieve_modules.append((sieve_name, sieve_mod))
        if not hasattr(sieve_mod, "Sieve"):
            error(_("@info",
                    "Module '%(file)s' does not define %(classname)s class.",
                    file=sieve_path, classname="Sieve"))

    # Setup sieves (description, known parameters...)
    pp = ParamParser()
    snames = []
    for name, mod in sieve_modules:
        scview = pp.add_subcmd(name)
        if hasattr(mod, "setup_sieve"):
            mod.setup_sieve(scview)
        snames.append(name)

    # If info on sieves requested, report and exit.
    if op.list_sieves:
        report(_("@info", "Available internal sieves:"))
        report(pp.listcmd(snames))
        sys.exit(0)
    elif op.list_sieve_params:
        params = set()
        for scview in pp.cmdviews():
            params.update(scview.params(addcol=True))
        report("\n".join(sorted(params)))
        sys.exit(0)
    elif op.help_sieves:
        report(_("@info", "Help for sieves:"))
        report("")
        report(pp.help(snames))
        sys.exit(0)

    # Prepare sieve parameters for parsing.
    sieve_params = list(op.sieve_params)
    # - append paramaters according to configuration
    sieve_params.extend(read_config_params(pp.cmdviews(), sieve_params))
    # - remove paramaters according to command line
    if op.sieve_no_params:
        sieve_params_mod = []
        for parspec in sieve_params:
            if parspec.split(":", 1)[0] not in op.sieve_no_params:
                sieve_params_mod.append(parspec)
        sieve_params = sieve_params_mod

    # If assembly of issued parameters requested, report and exit.
    if op.issued_params:
        escparams = []
        for parspec in sieve_params:
            if ":" in parspec:
                param, value = parspec.split(":", 1)
                escparam = "%s:%s" % (param, escape_sh(value))
            else:
                escparam = parspec
            escparams.append(escparam)
        fmtparams = " ".join(["-s%s" % x for x in sorted(escparams)])
        if fmtparams:
            report(fmtparams)
        sys.exit(0)

    # Parse sieve parameters.
    sparams, nacc_params = pp.parse(sieve_params, snames)
    if nacc_params:
        error(_("@info",
                "Parameters not accepted by any of issued subcommands: "
                "%(paramlist)s.",
                paramlist=format_item_list(nacc_params)))

    # ========================================
    # FIXME: Think of something less ugly.
    # Add as special parameter to each sieve:
    # - root paths from which the catalogs are collected
    # - whether destination independent coloring is in effect
    # - test function for catalog selection
    root_paths = []
    if op.raw_paths:
        root_paths.extend(op.raw_paths)
    if op.files_from:
        for ffpath in op.files_from:
            root_paths.extend(collect_paths_from_file(ffpath))
    if not op.raw_paths and not op.files_from:
        root_paths = ["."]
    is_cat_included = build_path_selector(incnames=op.include_names,
                                          incpaths=op.include_paths,
                                          excnames=op.exclude_names,
                                          excpaths=op.exclude_paths)
    for p in sparams.values():
        p.root_paths = root_paths
        p.raw_colors = op.raw_colors
        p.is_cat_included = is_cat_included
    # ========================================

    # Create sieves.
    sieves = []
    for name, mod in sieve_modules:
        sieves.append(mod.Sieve(sparams[name]))

    # Get the message monitoring indicator from the sieves.
    # Monitor unless all sieves have requested otherwise.
    use_monitored = False
    for sieve in sieves:
        if getattr(sieve, "caller_monitored", True):
            use_monitored = True
            break
    if op.verbose and not use_monitored:
        report(_("@info:progress", "--> Not monitoring messages."))

    # Get the sync indicator from the sieves.
    # Sync unless all sieves have requested otherwise,
    # and unless syncing is disabled globally in command line.
    do_sync = False
    for sieve in sieves:
        if getattr(sieve, "caller_sync", True):
            do_sync = True
            break
    if not op.do_sync:
        do_sync = False
    if op.verbose and not do_sync:
        report(_("@info:progress", "--> Not syncing after sieving."))

    # Open in header-only mode if no sieve has message processor.
    # Categorize sieves by the presence of message/header processors.
    use_headonly = True
    header_sieves = []
    header_sieves_last = []
    message_sieves = []
    for sieve in sieves:
        if hasattr(sieve, "process"):
            use_headonly = False
            message_sieves.append(sieve)
        if hasattr(sieve, "process_header"):
            header_sieves.append(sieve)
        if hasattr(sieve, "process_header_last"):
            header_sieves_last.append(sieve)
    if op.verbose and use_headonly:
        report(_("@info:progress",
                 "--> Opening catalogs in header-only mode."))

    # Collect catalog paths.
    fnames = collect_paths_cmdline(rawpaths=op.raw_paths,
                                   incnames=op.include_names,
                                   incpaths=op.include_paths,
                                   excnames=op.exclude_names,
                                   excpaths=op.exclude_paths,
                                   filesfrom=op.files_from,
                                   elsecwd=True,
                                   respathf=collect_catalogs,
                                   abort=True)

    # In skip mode problems are warnings; otherwise they abort the run.
    if op.do_skip:
        errwarn = warning
        errwarn_on_msg = warning_on_msg
    else:
        errwarn = error
        errwarn_on_msg = error_on_msg

    # Prepare inline progress indicator.
    if not op.quiet:
        update_progress = init_file_progress(fnames,
                                             addfmt=t_("@info:progress",
                                                       "Sieving: %(file)s"))

    # Sieve catalogs.
    modified_files = []
    for fname in fnames:
        if op.verbose:
            report(_("@info:progress", "Sieving %(file)s...", file=fname))
        elif not op.quiet:
            update_progress(fname)

        if op.msgfmt_check:
            d1, oerr, ret = collect_system(["msgfmt", "-o",
                                            "/dev/null", "-c", fname])
            if ret != 0:
                oerr = oerr.strip()
                errwarn(_("@info:progress",
                          "%(file)s: %(cmd)s check failed:\n"
                          "%(msg)s",
                          file=fname, cmd="msgfmt -c", msg=oerr))
                warning(_("@info:progress",
                          "Skipping catalog due to syntax check failure."))
                continue

        try:
            cat = Catalog(fname, monitored=use_monitored,
                          headonly=use_headonly,
                          single_entry=int(op.single_entry))
        except CatalogSyntaxError, e:
            errwarn(_("@info:progress",
                      "%(file)s: Parsing failed: %(msg)s",
                      file=fname, msg=e))
            warning(_("@info:progress",
                      "Skipping catalog due to parsing failure."))
            continue

        skip = False
        # First run all header sieves.
        if header_sieves and op.announce_entry:
            report(_("@info:progress",
                     "Sieving header of %(file)s...", file=fname))
        for sieve in header_sieves:
            try:
                ret = sieve.process_header(cat.header, cat)
            except SieveCatalogError, e:
                errwarn(_("@info:progress",
                          "%(file)s:header: Sieving failed: %(msg)s",
                          file=fname, msg=e))
                skip = True
                break
            # Non-zero/non-None return from a header sieve stops
            # further header sieving of this catalog.
            if ret not in (None, 0):
                break
def process(self, msg, cat):
    """
    Spell-check all translated forms of a message through Enchant.

    Skips untranslated messages and messages flagged with
    C{flag_no_check_spell}.  Unknown words are accumulated in
    C{self.unknown_words} and, unless C{self.words_only} is set,
    reported per message; optionally a summary is sent to Lokalize.
    """
    if not msg.translated:
        return

    failed_w_suggs = []  # pairs of wrong words and their suggestions
    for msgstr in msg.msgstr:
        # Skip message if explicitly requested.
        if flag_no_check_spell in manc_parse_flag_list(msg, "|"):
            continue

        # Apply precheck filters.
        for pfilter, pfname in self.pfilters:
            try:  # try as type F1A hook
                msgstr = pfilter(msgstr)
            except TypeError:
                try:  # try as type F3* hook
                    msgstr = pfilter(msgstr, msg, cat)
                except TypeError:
                    raise SieveError(
                        _("@info",
                          "Cannot execute filter '%(filt)s'.",
                          filt=pfname))

        # Split text into words.
        # TODO: See to use markup types somehow.
        words = proper_words(msgstr, True, cat.accelerator(), msg.format)

        # Eliminate from checking words matching the skip regex.
        if self.skip_rx:
            words = [x for x in words if not self.skip_rx.search(x)]

        # Eliminate from checking words explicitly listed as good.
        locally_ignored = manc_parse_list(msg, elist_well_spelled, ",")
        words = [x for x in words if x not in locally_ignored]

        for word in words:
            if not self.checker.check(word):
                failed = True  # NOTE(review): set but never read
                self.unknown_words.add(word)

                if not self.words_only or self.lokalize:
                    suggs = self.checker.suggest(word)
                    incmp = False
                    if len(suggs) > 5:  # do not put out too many words
                        suggs = suggs[:5]
                        incmp = True
                    failed_w_suggs.append((word, suggs))

                if not self.words_only:
                    if suggs:
                        fsuggs = format_item_list(suggs, incmp=incmp)
                        report_on_msg(_("@info",
                                        "Unknown word '%(word)s' "
                                        "(suggestions: %(wordlist)s).",
                                        word=word, wordlist=fsuggs),
                                      msg, cat)
                    else:
                        report_on_msg(_("@info",
                                        "Unknown word '%(word)s'.",
                                        word=word),
                                      msg, cat)

    if self.lokalize and failed_w_suggs:
        # Push a summary of all misspellings to Lokalize.
        repls = [_("@label", "Spelling errors:")]
        for word, suggs in failed_w_suggs:
            if suggs:
                # NOTE(review): incmp here carries the value left over
                # from the last checked word, not this word's own
                # truncation state — looks like a latent bug, confirm.
                fmtsuggs = format_item_list(suggs, incmp=incmp)
                repls.append(_("@item",
                               "%(word)s (suggestions: %(wordlist)s)",
                               word=word, wordlist=fmtsuggs))
            else:
                repls.append("%s" % (word))
        report_msg_to_lokalize(msg, cat, cjoin(repls, "\n"))
def spcheck(text, msg, cat):
    """
    Spell-check C{text} belonging to message C{msg} in catalog C{cat}.

    Closure: reads C{lang}, C{envs}, C{provider}, C{encoding}, C{variety},
    C{extopts}, C{suponly}, C{spanrep}, C{enchant_cfg}, C{checkers} and
    C{wsplit} from the enclosing scope (factory arguments/state —
    not visible here, semantics assumed from usage).

    @returns: if C{spanrep} is set, list of problem spans
        C{(start, end, note)}; otherwise the number of unknown words
        (problems are reported on the message as a side effect)
    """
    # Check if new spell checker should be constructed.
    # Language: explicit request > catalog header > Enchant config.
    if lang is not None:
        clang = lang
    elif cat.language() is not None:
        clang = cat.language()
    elif provider != "aspell-raw":
        clang = enchant_cfg.string("language")
    else:
        clang = None
    if not clang:
        raise PologyError(
            _("@info",
              "Cannot determine language for catalog '%(file)s'.",
              file=cat.filename))
    # Environments resolved with the same precedence as language.
    if envs is not None:
        cenvs = envs
    elif cat.environment() is not None:
        cenvs = cat.environment()
    elif provider != "aspell-raw":
        envs_str = enchant_cfg.string("environment")
        cenvs = envs_str.split(",") if envs_str else []
    else:
        cenvs = []
    # Checkers are cached per (language, environments) key.
    ckey = (clang, tuple(cenvs))
    if ckey not in checkers:
        if provider != "aspell-raw":
            checkers[ckey] = _construct_enchant(provider, clang, cenvs,
                                                encoding, variety, suponly)
        else:
            checkers[ckey] = _construct_aspell(clang, cenvs, encoding,
                                               variety, extopts, suponly)
    checker = checkers[ckey]

    # Prepare shortcut reports.
    if spanrep:
        defret = []
    else:
        defret = 0

    # Skip message if explicitly requested.
    if flag_no_check_spell in manc_parse_flag_list(msg, "|"):
        return defret

    # Split text into words and spans: [(word, (start, end)), ...]
    word_spans = wsplit(text, msg, cat)

    # Ignore words explicitly listed as good.
    ignored_words = set(manc_parse_list(msg, elist_well_spelled, ","))
    word_spans = [x for x in word_spans if x[0] not in ignored_words]

    spans = []
    for word, span in word_spans:
        encword = word.encode(encoding)
        if not checker.check(encword):
            encsuggs = checker.suggest(encword)
            maxsugg = 5  # limit to some reasonable number
            incmp = False
            if maxsugg > 0 and len(encsuggs) > maxsugg:
                encsuggs = encsuggs[:maxsugg]
                incmp = True
            suggs = [x.decode(encoding) for x in encsuggs]
            if maxsugg != 0 and suggs:
                fmtsuggs = format_item_list(suggs, incmp=incmp)
                snote = _("@info",
                          "Unknown word '%(word)s' "
                          "(suggestions: %(wordlist)s).",
                          word=word, wordlist=fmtsuggs)
            else:
                snote = _("@info",
                          "Unknown word '%(word)s'.",
                          word=word)
            # Extend the (start, end) span with its problem note.
            spans.append(span + (snote,))

    if spanrep:
        return spans
    else:
        for span in spans:
            if span[2:]:
                report_on_msg(span[2], msg, cat)
        return len(spans)
def add_param (self, name, ptype, mandatory=False, attrname=None,
               defval=None, admvals=None, multival=False,
               seplist=False, metavar=None, desc=None):
    """
    Define a parameter.

    A parameter is at minimum defined by its name and value type,
    and may be optional or mandatory. Optional parameter will be set
    to the supplied default value if not encountered during parsing.
    Default value must be of the given parameter type (in the sense
    of C{isinstance()}) or C{None}. Default value of C{None} can be
    used to be able to check if the parameter has been parsed at all.
    If parameter type is boolean, then the default value has a special
    meaning: the parameter is always parsed without an argument
    (a flag), and its value will become negation of the default value.
    If parameter value is not arbitrary for the given type, the set
    of admissible values can be defined too.

    Parameter can be used to collect a list of values, in two ways,
    or both combined. One is by repeating the parameter several times
    with different values, and another by a single parameter value
    itself being a comma-separated list of values (in which case the
    values are parsed into elements of requested type).
    For such parameters the default value should be a list too
    (or C{None}).

    For help purposes, parameter may be given a description and
    metavariable to represent its value.

    If the parameter being added to current subcommand has the name
    same as a previously defined parameter to another subcommand,
    then the current parameter shares semantics with the old one.
    This means that the type and list nature of current parameter
    must match that of the previous one (i.e. C{ptype}, C{multival},
    and C{seplist} must have same values).

    Double-newline in description string splits text into paragraphs.

    @param name: parameter name
    @type name: string
    @param ptype: type of the expected argument
    @type ptype: type
    @param mandatory: whether parameter is mandatory
    @type mandatory: bool
    @param attrname: explicit name for the object attribute under which
        the parsed parameter value is stored (auto-derived if C{None})
    @type attrname: string
    @param defval: default value for the argument
    @type defval: instance of C{ptype} or C{None}
    @param admvals: admissible values for the argument
    @type admvals: list of C{ptype} elements or C{None}
    @param multival: whether parameter can be repeated for list of values
    @type multival: bool
    @param seplist: whether parameter is a comma-separated list of values
    @type seplist: bool
    @param metavar: name for parameter's value
    @type metavar: string or C{None}
    @param desc: description of the parameter
    @type desc: string or C{None}
    """

    param = name
    islist = multival or seplist

    # Validate the default value against the declared type:
    # scalar parameters must be an instance of ptype, list-valued
    # parameters must contain only ptype elements (None is always
    # accepted as "not parsed yet").
    if defval is not None and not islist and not isinstance(defval, ptype):
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to "
              "subcommand '%(cmd)s' with default value '%(val)s' "
              "different from its stated type '%(type)s'.",
              par=param, cmd=self._subcmd, val=defval, type=ptype))

    if defval is not None and islist and not _isinstance_els(defval, ptype):
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to "
              "subcommand '%(cmd)s' with default value '%(val)s' "
              "which contains some elements different from their "
              "stated type '%(type)s'.",
              par=param, cmd=self._subcmd, val=defval, type=ptype))

    # The default value, if given, must itself be admissible.
    if defval is not None and admvals is not None and defval not in admvals:
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to "
              "subcommand '%(cmd)s' with default value '%(val)s' "
              "not from the admissible set: %(vallist)s.",
              par=param, cmd=self._subcmd, val=defval,
              vallist=format_item_list(admvals)))

    # Reject duplicate definition within this subcommand.
    if param in self._ptypes:
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to subcommand "
              "'%(cmd)s' more than once.",
              par=param, cmd=self._subcmd))

    # List-valued parameters need a list/tuple (or None) default.
    if islist and not isinstance(defval, (type(None), tuple, list)):
        raise SubcmdError(
            _("@info",
              "Parameter '%(par)s' to subcommand '%(cmd)s' "
              "is stated to be list-valued, but the default value "
              "is not given as a list or tuple.",
              par=param, cmd=self._subcmd))

    # Probe other subcommand views for a same-named parameter, to
    # enforce shared semantics (same ptype/multival/seplist).
    # NOTE(review): general_* are overwritten on every iteration, so
    # the consistency check effectively applies only to the LAST view
    # visited (and resets to None if that view lacks the parameter).
    # Presumably the intent was to keep the first non-None hit --
    # confirm before changing.
    general_ptype = None
    general_multival = None
    general_seplist = None
    for scview in self._parent._scviews.itervalues():
        general_ptype = scview._ptypes.get(param)
        general_multival = scview._multivals.get(param)
        general_seplist = scview._seplists.get(param)

    if general_ptype is not None and ptype is not general_ptype:
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to "
              "subcommand '%(cmd)s' with '%(field)s' field "
              "different from the same parameter in other subcommands.",
              par=param, cmd=self._subcmd, field="ptype"))

    if general_multival is not None and multival != general_multival:
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to "
              "subcommand '%(cmd)s' with '%(field)s' field "
              "different from the same parameter in other subcommands.",
              par=param, cmd=self._subcmd, field="multival"))

    if general_seplist is not None and seplist != general_seplist:
        raise SubcmdError(
            _("@info",
              "Trying to add parameter '%(par)s' to "
              "subcommand '%(cmd)s' with '%(field)s' field "
              "different from the same parameter in other subcommands.",
              par=param, cmd=self._subcmd, field="seplist"))

    # Record the parameter; _ordered preserves definition order for help.
    self._ptypes[param] = ptype
    self._mandatorys[param] = mandatory
    self._defvals[param] = defval
    self._admvals[param] = admvals
    self._multivals[param] = multival
    self._seplists[param] = seplist
    self._metavars[param] = metavar
    self._descs[param] = desc
    self._attrnames[param] = attrname
    self._ordered.append(param)
def _read_propcons (self, fpath): if not os.path.isfile(fpath): raise SieveError(_("@info", "Property constraint file '%(file)s' " "does not exist.", file=fpath)) lines = open(fpath).read().decode("UTF-8").split("\n") if not lines[-1]: lines.pop() cmrx = re.compile(r"#.*") # Constraints collected as list of tuples: # (compiled key regex, string key regex, # compiled value regex, string value regex, # string of flags) propcons = [] lno = 0 def mkerr (problem): return _("@info", "Invalid property map constraint " "at %(file)s:%(line)d: %(snippet)s.", file=fpath, line=lno, snippet=problem) known_flags = set(("i", "I", "t", "r")) for line in lines: lno += 1 line = cmrx.sub("", line).strip() if not line: continue sep = line[0] if sep.isalnum(): raise SieveError(mkerr(_("@item:intext", "alphanumeric separators " "not allowed"))) lst = line.split(sep) if len(lst) < 4: raise SieveError(mkerr(_("@item:intext", "too few separators"))) elif len(lst) > 4: raise SieveError(mkerr(_("@item:intext", "too many separators"))) d1, keyrxstr, valrxstr, flags = lst unknown_flags = set(flags).difference(known_flags) if unknown_flags: fmtflags = format_item_list(sorted(unknown_flags), quoted=True) raise SieveError(mkerr(_("@item:intext", "unknown flags %(flaglist)s", flaglist=fmtflags))) rxs = [] for rxstr, iflag in ((keyrxstr, "I"), (valrxstr, "i")): rxfls = re.U if iflag in flags: rxfls |= re.I wrxstr = r"^(?:%s)$" % rxstr try: rx = re.compile(wrxstr, rxfls) except: raise SieveError(mkerr(_("@item:intext", "invalid regular expression " "'%(regex)s'", regex=rxstr))) rxs.append(rx) keyrx, valrx = rxs propcons.append((keyrx, keyrxstr, valrx, valrxstr, flags)) return propcons
def main ():
    """
    Command line entry point: create embedded diffs of PO files,
    either between two explicit files/directories or between
    revisions of paths under version control.
    """

    locale.setlocale(locale.LC_ALL, "")

    # Get defaults for command line options from global config.
    cfgsec = pology_config.section("poediff")
    def_do_merge = cfgsec.boolean("merge", True)

    # Setup options and parse the command line.
    usage = _("@info command usage",
        "%(cmd)s [OPTIONS] FILE1 FILE2\n"
        "%(cmd)s [OPTIONS] DIR1 DIR2\n"
        "%(cmd)s -c VCS [OPTIONS] [PATHS...]",
        cmd="%prog")
    desc = _("@info command description",
        "Create embedded diffs of PO files.")
    ver = _("@info command version",
        u"%(cmd)s (Pology) %(version)s\n"
        u"Copyright © 2009, 2010 "
        u"Chusslove Illich (Часлав Илић) <%(email)s>",
        cmd="%prog", version=version(), email="*****@*****.**")

    showvcs = list(set(available_vcs()).difference(["none"]))
    showvcs.sort()

    opars = ColorOptionParser(usage=usage, description=desc, version=ver)
    opars.add_option(
        "-b", "--skip-obsolete",
        action="store_true", dest="skip_obsolete", default=False,
        help=_("@info command line option description",
               "Do not diff obsolete messages."))
    opars.add_option(
        "-c", "--vcs",
        metavar=_("@info command line value placeholder", "VCS"),
        dest="version_control",
        help=_("@info command line option description",
               "Paths are under version control by given VCS; "
               "can be one of: %(vcslist)s.",
               vcslist=format_item_list(showvcs)))
    opars.add_option(
        "--list-options",
        action="store_true", dest="list_options", default=False,
        help=_("@info command line option description",
               "List the names of available options."))
    opars.add_option(
        "--list-vcs",
        action="store_true", dest="list_vcs", default=False,
        help=_("@info command line option description",
               "List the keywords of known version control systems."))
    opars.add_option(
        "-n", "--no-merge",
        action="store_false", dest="do_merge", default=def_do_merge,
        help=_("@info command line option description",
               "Do not try to indirectly pair messages by merging catalogs."))
    opars.add_option(
        "-o", "--output",
        metavar=_("@info command line value placeholder", "POFILE"),
        dest="output",
        help=_("@info command line option description",
               "Output diff catalog to a file instead of stdout."))
    opars.add_option(
        "-p", "--paired-only",
        action="store_true", dest="paired_only", default=False,
        help=_("@info command line option description",
               "When two directories are diffed, ignore catalogs which "
               "are not present in both directories."))
    opars.add_option(
        "-q", "--quiet",
        action="store_true", dest="quiet", default=False,
        help=_("@info command line option description",
               "Do not display any progress info."))
    opars.add_option(
        "-Q", "--quick",
        action="store_true", dest="quick", default=False,
        help=_("@info command line option description",
               "Equivalent to %(opt)s.",
               opt="-bns"))
    opars.add_option(
        "-r", "--revision",
        metavar=_("@info command line value placeholder", "REV1[:REV2]"),
        dest="revision",
        help=_("@info command line option description",
               "Revision from which to diff to current working copy, "
               "or from first to second revision (if VCS is given)."))
    opars.add_option(
        "-s", "--strip-headers",
        action="store_true", dest="strip_headers", default=False,
        help=_("@info command line option description",
               "Do not diff headers and do not write out the top header "
               "(resulting output cannot be used as patch)."))
    opars.add_option(
        "-U", "--update-effort",
        action="store_true", dest="update_effort", default=False,
        help=_("@info command line option description",
               "Instead of outputting the diff, calculate and output "
               "an estimate of the effort that was needed to update "
               "the translation from old to new paths. "
               # FIX: second placeholder was %(opt1)s repeated, so the
               # opt2="-n" argument was never interpolated.
               "Ignores %(opt1)s and %(opt2)s options.",
               opt1="-b", opt2="-n"))
    add_cmdopt_colors(opars)

    (op, free_args) = opars.parse_args(str_to_unicode(sys.argv[1:]))

    if op.list_options:
        report(list_options(opars))
        sys.exit(0)
    if op.list_vcs:
        report("\n".join(showvcs))
        sys.exit(0)

    # Could use some speedup.
    try:
        import psyco
        psyco.full()
    except ImportError:
        pass

    set_coloring_globals(ctype=op.coloring_type, outdep=(not op.raw_colors))

    # -Q is shorthand for -b -n -s.
    if op.quick:
        op.do_merge = False
        op.skip_obsolete = True
        op.strip_headers = True

    # Create VCS.
    vcs = None
    if op.version_control:
        if op.version_control not in available_vcs(flat=True):
            error_wcl(_("@info",
                        "Unknown VCS '%(vcs)s' selected.",
                        vcs=op.version_control))
        vcs = make_vcs(op.version_control)

    # Sanity checks on paths.
    paths = free_args
    if not vcs:
        if len(paths) != 2:
            error_wcl(_("@info",
                        "Exactly two paths are needed for diffing."))
        for path in paths:
            if not os.path.exists(path):
                # FIX: was a raw untranslated string; now localized,
                # consistent with the VCS branch below.
                error_wcl(_("@info",
                            "Path '%(path)s' does not exist.",
                            path=path))
        p1, p2 = paths
        if not (   (os.path.isfile(p1) and os.path.isfile(p2))
                or (os.path.isdir(p1) and os.path.isdir(p2))):
            error_wcl(_("@info",
                        "Both paths must be either files or directories."))
    else:
        # Default to current working dir if no paths given.
        paths = paths or ["."]
        for path in paths:
            if not os.path.exists(path):
                error_wcl(_("@info",
                            "Path '%(path)s' does not exist.",
                            path=path))
            if not vcs.is_versioned(path):
                error_wcl(_("@info",
                            "Path '%(path)s' is not under version control.",
                            path=path))

    # Collect and pair PO files in given paths.
    # Each pair specification is in the form of
    # ((path1, path2), (vpath1, vpath2))
    # where path* are the real paths, and vpath* the visual paths to be
    # presented in diff output.
    if not vcs:
        fpairs = collect_file_pairs(paths[0], paths[1], op.paired_only)
        pspecs = [(x, x) for x in fpairs]
    else:
        lst = op.revision and op.revision.split(":", 1) or []
        if len(lst) > 2:
            # FIX: keyword argument must match the %(revlist)s
            # placeholder (was revspec=, which would fail at runtime).
            error_wcl(_("@info",
                        "Too many revisions given: %(revlist)s.",
                        revlist=format_item_list(lst)))
        elif len(lst) == 2:
            revs = lst # diff between revisions
        elif len(lst) == 1:
            revs = [lst[0], None] # diff from revision to working copy
        else:
            revs = ["", None] # diff from head to working copy

        # Replace original paths with modified/added catalogs.
        paths_nc = []
        for path in paths:
            # FIX: inner loop no longer clobbers the outer 'path'.
            for cpath in vcs.to_commit(path):
                if cpath.endswith(".po") or cpath.endswith(".pot"):
                    paths_nc.append(cpath)
        paths = paths_nc
        paths.sort()

        pspecs = collect_pspecs_from_vcs(vcs, paths, revs, op.paired_only)

    if not op.update_effort:
        ecat, ndiffed = diff_pairs(pspecs, op.do_merge,
                                   colorize=(not op.output),
                                   shdr=op.strip_headers,
                                   noobs=op.skip_obsolete,
                                   quiet=op.quiet)
        if ndiffed > 0:
            hmsgctxt = ecat.header.get_field_value(EDST.hmsgctxt_field)
            lines = []
            msgs = list(ecat)
            if not op.strip_headers:
                msgs.insert(0, ecat.header.to_msg())
            for msg in msgs:
                if op.strip_headers and msg.msgctxt == hmsgctxt:
                    # Replace the catalog-separator message with a
                    # short comment block when headers are stripped.
                    sepl = []
                    sepl += [msg.manual_comment[0]]
                    sepl += msg.msgid.split("\n")[:2]
                    lines.extend(["# %s\n" % x for x in sepl])
                    lines.append("\n")
                else:
                    lines.extend(msg.to_lines(force=True,
                                              wrapf=ecat.wrapf()))
            diffstr = cjoin(lines)[:-1] # remove last newline
            if op.output:
                # FIX: context-managed handle; no shadowing of the
                # 'file' builtin.
                with open(op.output, "w") as ofl:
                    ofl.write(diffstr.encode(ecat.encoding()))
            else:
                report(diffstr)
    else:
        updeff = pairs_update_effort(pspecs, quiet=op.quiet)
        ls = []
        # FIX: loop variable renamed so it does not shadow the
        # command-description 'desc' defined above.
        for kw, effdesc, val, fmtval in updeff:
            ls.append(_("@info",
                        "%(quantity)s: %(value)s",
                        quantity=effdesc, value=fmtval))
        report("\n".join(ls))

    # Clean up.
    cleanup_tmppaths()
def finalize(self):
    """
    Produce and output the assembled statistics.

    Runs once after all catalogs have been sieved: folds in stats for
    unmatched templates (if template correspondence was requested),
    drops catalogs below the completeness limit, aggregates counts
    overall and by directory/file as requested, and writes out the
    selected report forms (table, message/word bars, msgfmt-like
    summary, incomplete-catalog table/file).
    """

    # If template correspondence requested, handle POTs without POs.
    if self.template_subdirs:
        # Collect all catalogs in template subdirs.
        tpaths = collect_catalogs(self.template_subdirs)
        tpaths = filter(self.p.is_cat_included, tpaths)
        # Filter to have only POTs remain.
        tpaths = [x for x in tpaths if x.endswith(".pot")]
        # Filter to leave out matched templates.
        tpaths = [x for x in tpaths if x not in self.matched_templates]
        # Add stats on all unmatched templates.
        for tpath in tpaths:
            # monitored=False: read-only pass, no modification tracking.
            cat = Catalog(tpath, monitored=False)
            self.process_header(cat.header, cat)
            for msg in cat:
                self.process(msg, cat)
        # Map template to translation subdirs.
        for tpath in tpaths:
            tsubdir = os.path.dirname(tpath)
            subdir = tsubdir.replace(self.tspec_repl, self.tspec_srch, 1)
            self.mapped_template_subdirs[tsubdir] = subdir

    # If completeness limit in effect, eliminate catalogs not passing it.
    if self.p.mincomp is not None:
        ncounts = {}
        ninccats = {}
        for filename, count in self.counts.iteritems():
            # Completeness ratio: translated/total messages
            # ("or 1" guards against division by zero on empty catalogs).
            cr = float(count["trn"][0]) / (count["tot"][0] or 1)
            if cr >= self.p.mincomp:
                ncounts[filename] = count
                inccat = self.incomplete_catalogs.get(filename)
                if inccat is not None:
                    ninccats[filename] = inccat
        self.counts = ncounts
        self.incomplete_catalogs = ninccats

    # Assemble sets of total counts by requested divisions.
    count_overall = self._count_zero()
    counts_bydir = {}
    filenames_bydir = {}
    for filename, count in self.counts.iteritems():
        count_overall = self._count_sum(count_overall, count)
        if self.p.bydir:
            cdir = os.path.dirname(filename)
            if cdir in self.mapped_template_subdirs:
                # Pretend templates-only are within language subdir.
                cdir = self.mapped_template_subdirs[cdir]
            if cdir not in counts_bydir:
                counts_bydir[cdir] = self._count_zero()
                filenames_bydir[cdir] = []
            counts_bydir[cdir] = self._count_sum(counts_bydir[cdir], count)
            filenames_bydir[cdir].append(filename)

    # Arrange sets into ordered list with titles.
    # Each entry is (title, count, is-summed-entry).
    counts = []
    if self.p.bydir:
        cdirs = counts_bydir.keys()
        cdirs.sort()
        for cdir in cdirs:
            if self.p.byfile:
                self._sort_equiv_filenames(filenames_bydir[cdir])
                for filename in filenames_bydir[cdir]:
                    counts.append((filename, self.counts[filename], False))
            counts.append(("%s/" % cdir, counts_bydir[cdir], False))
        counts.append((_("@item:intable sum of all other entries",
                         "(overall)"), count_overall, True))
    elif self.p.byfile:
        filenames = self.counts.keys()
        self._sort_equiv_filenames(filenames)
        for filename in filenames:
            counts.append((filename, self.counts[filename], False))
        counts.append((_("@item:intable sum of all other entries",
                         "(overall)"), count_overall, True))
    else:
        counts.append((None, count_overall, False))

    # Indicate conspicuously up front modifiers to counting.
    modstrs = []
    if self.p.branch:
        fmtbranches = format_item_list(self.p.branch)
        modstrs.append(
            _("@item:intext",
              "branches (%(branchlist)s)",
              branchlist=fmtbranches))
    if self.p.maxwords is not None and self.p.minwords is None:
        modstrs.append(
            n_("@item:intext",
               "at most %(num)d word",
               "at most %(num)d words",
               num=self.p.maxwords))
    if self.p.minwords is not None and self.p.maxwords is None:
        modstrs.append(
            n_("@item:intext",
               "at least %(num)d word",
               "at least %(num)d words",
               num=self.p.minwords))
    if self.p.minwords is not None and self.p.maxwords is not None:
        modstrs.append(
            n_("@item:intext",
               "from %(num1)d to %(num)d word",
               "from %(num1)d to %(num)d words",
               num1=self.p.minwords, num=self.p.maxwords))
    if self.p.lspan:
        modstrs.append(
            _("@item:intext",
              "line span %(span)s",
              span=self.p.lspan))
    if self.p.espan:
        modstrs.append(
            _("@item:intext",
              "entry span %(span)s",
              span=self.p.espan))
    if self.p.ondiff:
        modstrs.append(_("@item:intext",
                         "scaled fuzzy counts"))

    # Should titles be output in-line or on separate lines.
    self.inline = False
    maxtitlecw = 0
    if (not self.p.wbar or not self.p.msgbar or not self.p.msgfmt) and (not self.p.table):
        for title, count, summed in counts:
            if title is not None:
                self.inline = True
                titlecw = len(title)
                if maxtitlecw < titlecw:
                    maxtitlecw = titlecw

    # Output statistics in requested forms.
    for title, count, summed in counts:
        # Output the title if defined.
        if title is not None:
            if self.inline:
                # Pad titles to common width for aligned inline output.
                ntitle = (("%%-%ds" % maxtitlecw) % title)
            else:
                ntitle = title
            # Must color after padding, to avoid it seeing the colors.
            ntitle = _("@title",
                       "<bold>%(title)s</bold>",
                       title=ntitle)
            if self.inline:
                report(ntitle + " ", newline=False)
            else:
                report(ntitle)
        if self.p.table:
            self._tabular_stats(counts, title, count)
        if self.p.msgbar:
            self._msg_bar_stats(counts, title, count, summed)
        if self.p.wbar:
            self._w_bar_stats(counts, title, count, summed)
        if self.p.msgfmt:
            self._msg_simple_stats(title, count, summed)

    # Output the table of catalogs which are not fully translated,
    # if requested.
    if self.p.incomplete and self.incomplete_catalogs:
        filenames = self.incomplete_catalogs.keys()
        self._sort_equiv_filenames(filenames)
        data = []
        # Column of catalog filenames.
        data.append(filenames)
        # Message counts: fuzzy, untranslated, and their sum.
        data.append([self.counts[x]["fuz"][0] for x in filenames])
        data.append([self.counts[x]["unt"][0] for x in filenames])
        data.append([x + y for x, y in zip(data[1], data[2])])
        # Word counts: fuzzy, untranslated, and their sum.
        data.append([self.counts[x]["fuz"][1] for x in filenames])
        data.append([self.counts[x]["unt"][1] for x in filenames])
        # Columns of the two added.
        data.append([x + y for x, y in zip(data[4], data[5])])
        # Column names and formats.
        coln = [_("@title:column", "catalog"),
                _("@title:column fuzzy messages", "msg/f"),
                _("@title:column untranslated messages", "msg/u"),
                _("@title:column fuzzy and untranslated messages", "msg/f+u"),
                _("@title:column words in fuzzy messages", "w/f"),
                _("@title:column words in untranslated messages", "w/u"),
                _("@title:column words in fuzzy and untranslated messages", "w/f+u")]
        maxfl = max([len(x) for x in filenames])
        dfmt = ["%%-%ds" % maxfl,
                "%d", "%d", "%d", "%d", "%d", "%d"]
        # Output.
        report("-")
        report(tabulate(data, coln=coln, dfmt=dfmt,
                        space=" ", none=u"-", colorize=True))

    # Write file names of catalogs which are not fully translated
    # into a file, if requested.
    if self.p.incompfile:
        filenames = sorted(self.incomplete_catalogs.keys())
        # Encode with the command-line locale's preferred encoding.
        cmdlenc = locale.getpreferredencoding()
        ofl = codecs.open(self.p.incompfile, "w", cmdlenc)
        ofl.writelines([x + "\n" for x in filenames])
        ofl.close()

    if modstrs:
        report(
            _("@item:intable",
              "modifiers: %(modlist)s",
              modlist=format_item_list(modstrs)))