def main(output, input_file, version):
    pp = Preprocessor()
    pp.context.update({
        'VERSION': version,
    })
    pp.out = output
    pp.do_include(input_file)
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])
    substs = buildconfig.substs

    # Substs taken verbatim.
    substs_vars = (
        'BIN_SUFFIX',
    )
    for var in substs_vars:
        pp.context[var] = '"%s"' % substs[var]

    # Derived values.
    for key, condition in (
            ('IS_MAC', substs['OS_ARCH'] == 'Darwin'),
            ('IS_LINUX', substs['OS_ARCH'] == 'Linux'),
            ('IS_TEST_BUILD', substs.get('ENABLE_TESTS') == '1'),
            ('IS_DEBUG_BUILD', substs.get('MOZ_DEBUG') == '1'),
            ('CRASHREPORTER', substs.get('MOZ_CRASHREPORTER')),
            ('IS_ASAN', substs.get('MOZ_ASAN'))):
        if condition:
            pp.context[key] = '1'
        else:
            pp.context[key] = '0'

    pp.context.update({
        'XPC_BIN_PATH': '"%s/dist/bin"' % buildconfig.topobjdir,
        'CERTS_SRC_DIR': '"%s/build/pgo/certs"' % buildconfig.topsrcdir,
    })

    pp.out = output
    pp.do_include(input_file)
def main():
    parser = argparse.ArgumentParser(description='Find duplicate files in directory.')
    parser.add_argument('--warning', '-w', action='store_true',
                        help='Only warn about duplicates, do not exit with an error')
    parser.add_argument('--file', '-f', action='append', dest='dupes_files', default=[],
                        help='Add exceptions to the duplicate list from this file')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    parser.add_argument('directory',
                        help='The directory to check for duplicates in')

    args = parser.parse_args()

    allowed_dupes = []
    for filename in args.dupes_files:
        pp = Preprocessor()
        pp.context.update(buildconfig.defines['ALLDEFINES'])
        if args.D:
            pp.context.update(args.D)
        for undefine in args.U:
            if undefine in pp.context:
                del pp.context[undefine]
        pp.out = StringIO()
        pp.do_filter('substitution')
        pp.do_include(filename)
        allowed_dupes.extend([line.partition('#')[0].rstrip()
                              for line in pp.out.getvalue().splitlines()])

    find_dupes(args.directory, bail=not args.warning, allowed_dupes=allowed_dupes)
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'VERSION': 'xul%s' % buildconfig.substs['MOZILLA_SYMBOLVERSION'],
    })
    pp.out = output
    pp.do_include(input_file)
def preprocess(input, parser, defines={}):
    '''
    Preprocess the file-like input with the given defines, and send the
    preprocessed output line by line to the given parser.
    '''
    pp = Preprocessor()
    pp.context.update(defines)
    pp.do_filter('substitution')
    pp.out = PreprocessorOutputWrapper(pp, parser)
    pp.do_include(input)
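# A minimal sketch of the pattern preprocess() builds on, with pp.out pointed
# at an io.StringIO instead of the PreprocessorOutputWrapper defined elsewhere
# in that module. It assumes the Preprocessor class used throughout these
# snippets is importable from mozbuild.preprocessor; the file contents and the
# define name are made up for illustration.
import io
import tempfile

from mozbuild.preprocessor import Preprocessor

with tempfile.NamedTemporaryFile('w', suffix='.in', delete=False) as f:
    f.write('#ifdef FOO\nvalue=@FOO@\n#endif\n')

pp = Preprocessor()
pp.context.update({'FOO': '1'})   # defines visible to #ifdef and @FOO@
pp.do_filter('substitution')      # expand @VAR@ in emitted lines
pp.out = io.StringIO()            # collect output instead of streaming it
pp.do_include(f.name)
print(pp.out.getvalue())          # -> value=1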
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines["ALLDEFINES"])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get("MOZ_DEBUG"):
        pp.context["DEBUG"] = "1"

    if buildconfig.substs.get("CPU_ARCH") == "aarch64":
        pp.context["MOZ_AARCH64"] = True

    pp.out = StringIO()
    pp.do_filter("substitution")
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        input_file = os.path.relpath(
            pref_list_filename,
            os.environ.get("GECKO_PATH", os.environ.get("TOPSRCDIR")),
        )
        code = generate_code(pref_list, input_file)
    except (IOError, ValueError) as e:
        print("{}: error:\n {}\n".format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code["static_pref_list_all_h"])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite("StaticPrefsAll.h") as fd:
        fd.write(code["static_prefs_all_h"])

    for group, text in sorted(code["static_pref_list_group_h"].items()):
        filename = "StaticPrefList_{}.h".format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code["static_prefs_group_h"].items()):
        filename = "StaticPrefs_{}.h".format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname, "StaticPrefsCGetters.cpp")) as fd:
        fd.write(code["static_prefs_c_getters_cpp"])

    with FileAvoidWrite("static_prefs.rs") as fd:
        fd.write(code["static_prefs_rs"])
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'FFI_EXEC_TRAMPOLINE_TABLE': '0',
        'HAVE_LONG_DOUBLE': '0',
        'TARGET': buildconfig.substs['FFI_TARGET'],
        'VERSION': '',
    })
    pp.do_filter('substitution')
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update(
        {
            "FFI_EXEC_TRAMPOLINE_TABLE": "0",
            "HAVE_LONG_DOUBLE": "0",
            "TARGET": buildconfig.substs["FFI_TARGET"],
            "VERSION": "",
        }
    )
    pp.do_filter("substitution")
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
def main(output, input_file, *defines):
    pp = Preprocessor()
    pp.context.update({
        "FFI_EXEC_TRAMPOLINE_TABLE": "0",
        "HAVE_LONG_DOUBLE": "0",
        "TARGET": buildconfig.substs["FFI_TARGET"],
        "VERSION": "",
    })
    for d in defines:
        pp.context.update({d: "1"})
    pp.do_filter("substitution")
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
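# A small sketch of what the setMarker(None) calls above achieve (assuming the
# mozbuild Preprocessor): with no directive marker, '#'-prefixed lines in the
# template pass through verbatim instead of being treated as preprocessor
# directives, while the substitution filter still expands @VAR@. This mirrors
# the test_no_marker cases in the test suites further down; the template text
# and the TARGET value here are made up for illustration.
import io
import tempfile

from mozbuild.preprocessor import Preprocessor

with tempfile.NamedTemporaryFile('w', suffix='.h.in', delete=False) as f:
    f.write('#ifndef GENERATED_H\n'
            '#define GENERATED_H\n'
            '#define TARGET_NAME "@TARGET@"\n'
            '#endif\n')

pp = Preprocessor()
pp.context.update({'TARGET': 'x86_64'})
pp.do_filter('substitution')
pp.setMarker(None)
pp.out = io.StringIO()
pp.do_include(f.name)
print(pp.out.getvalue())
# #ifndef GENERATED_H
# #define GENERATED_H
# #define TARGET_NAME "x86_64"
# #endif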
def rnp_preprocess(tmpl, dest, defines):
    """
    Generic preprocessing
    :param BinaryIO tmpl: open filehandle (read) input
    :param BinaryIO dest: open filehandle (write) output
    :param dict defines: result of get_defines()
    :return boolean:
    """
    pp = Preprocessor()
    pp.setMarker("%")
    pp.addDefines(defines)
    pp.do_filter("substitution")
    pp.out = dest
    pp.do_include(tmpl, True)
    return True
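# A hedged usage sketch for rnp_preprocess(): the caller owns opening and
# closing the file handles, and the template's directives use the '%' marker
# configured by setMarker('%'). get_defines() lives elsewhere in the original
# module, so a plain dict stands in for its result; the file names, define
# names, and the use of text-mode handles are assumptions for illustration.
defines = {'RNP_VERSION_MAJOR': '0', 'HAVE_BZLIB_H': '1'}

with open('config.h.in') as tmpl, open('config.h', 'w') as dest:
    rnp_preprocess(tmpl, dest, defines)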
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'

    pp.out = BytesIO()
    pp.do_filter('substitution')
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        code = generate_code(pref_list)
    except (IOError, ValueError) as e:
        print('{}: error:\n {}\n'.format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code['static_pref_list_all_h'])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite('StaticPrefsAll.h') as fd:
        fd.write(code['static_prefs_all_h'])

    for group, text in sorted(code['static_pref_list_group_h'].items()):
        filename = 'StaticPrefList_{}.h'.format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code['static_prefs_group_h'].items()):
        filename = 'StaticPrefs_{}.h'.format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname, 'StaticPrefsCGetters.cpp')) as fd:
        fd.write(code['static_prefs_c_getters_cpp'])

    with FileAvoidWrite('static_prefs.rs') as fd:
        fd.write(code['static_prefs_rs'])
def main(): parser = argparse.ArgumentParser( description="Find duplicate files in directory.") parser.add_argument( "--warning", "-w", action="store_true", help="Only warn about duplicates, do not exit with an error", ) parser.add_argument( "--file", "-f", action="append", dest="dupes_files", default=[], help="Add exceptions to the duplicate list from this file", ) parser.add_argument("-D", action=DefinesAction) parser.add_argument("-U", action="append", default=[]) parser.add_argument("directory", help="The directory to check for duplicates in") args = parser.parse_args() allowed_dupes = [] for filename in args.dupes_files: pp = Preprocessor() pp.context.update(buildconfig.defines["ALLDEFINES"]) if args.D: pp.context.update(args.D) for undefine in args.U: if undefine in pp.context: del pp.context[undefine] pp.out = StringIO() pp.do_filter("substitution") pp.do_include(filename) allowed_dupes.extend([ line.partition("#")[0].rstrip() for line in pp.out.getvalue().splitlines() ]) find_dupes(args.directory, bail=not args.warning, allowed_dupes=allowed_dupes)
def main():
    parser = argparse.ArgumentParser(
        description='Find duplicate files in directory.')
    parser.add_argument(
        '--warning', '-w', action='store_true',
        help='Only warn about duplicates, do not exit with an error')
    parser.add_argument(
        '--file', '-f', action='append', dest='dupes_files', default=[],
        help='Add exceptions to the duplicate list from this file')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    parser.add_argument('directory',
                        help='The directory to check for duplicates in')

    args = parser.parse_args()

    allowed_dupes = []
    for filename in args.dupes_files:
        pp = Preprocessor()
        pp.context.update(buildconfig.defines['ALLDEFINES'])
        if args.D:
            pp.context.update(args.D)
        for undefine in args.U:
            if undefine in pp.context:
                del pp.context[undefine]
        pp.out = StringIO()
        pp.do_filter('substitution')
        pp.do_include(filename)
        allowed_dupes.extend([
            line.partition('#')[0].rstrip()
            for line in pp.out.getvalue().splitlines()
        ])

    find_dupes(args.directory, bail=not args.warning, allowed_dupes=allowed_dupes)
def main(output, input_file):
    with open(input_file) as fh:
        if buildconfig.substs['EXPAND_LIBS_LIST_STYLE'] == 'linkerscript':
            def cleanup(line):
                assert line.startswith('INPUT("')
                assert line.endswith('")')
                return line[len('INPUT("'):-len('")')]

            objs = [cleanup(l.strip()) for l in fh.readlines()]
        else:
            objs = [l.strip() for l in fh.readlines()]

    pp = Preprocessor()
    pp.out = StringIO()
    pp.do_include(os.path.join(buildconfig.topobjdir, 'buildid.h'))
    buildid = pp.context['MOZ_BUILDID']
    output.write(
        'extern const char gToolkitBuildID[] = "%s";' % buildid
    )
    return set(o for o in objs
               if os.path.splitext(os.path.basename(o))[0] != 'buildid')
class TestLineEndings(unittest.TestCase):
    """
    Unit tests for the Context class
    """

    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
        self.f = NamedTemporaryFile(mode='wb')

    def tearDown(self):
        self.f.close()

    def createFile(self, lineendings):
        for line, ending in zip([b'a', b'#literal b', b'c'], lineendings):
            self.f.write(line + ending)
        self.f.flush()

    def testMac(self):
        self.createFile([b'\x0D'] * 3)
        self.pp.do_include(self.f.name)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

    def testUnix(self):
        self.createFile([b'\x0A'] * 3)
        self.pp.do_include(self.f.name)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

    def testWindows(self):
        self.createFile([b'\x0D\x0A'] * 3)
        self.pp.do_include(self.f.name)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
class TestLineEndings(unittest.TestCase):
    """
    Unit tests for the Context class
    """

    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
        self.tempnam = os.tempnam('.')

    def tearDown(self):
        os.remove(self.tempnam)

    def createFile(self, lineendings):
        f = open(self.tempnam, 'wb')
        for line, ending in zip(['a', '#literal b', 'c'], lineendings):
            f.write(line+ending)
        f.close()

    def testMac(self):
        self.createFile(['\x0D']*3)
        self.pp.do_include(self.tempnam)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

    def testUnix(self):
        self.createFile(['\x0A']*3)
        self.pp.do_include(self.tempnam)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

    def testWindows(self):
        self.createFile(['\x0D\x0A']*3)
        self.pp.do_include(self.tempnam)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
def load_yaml(yaml_path):
    # First invoke preprocessor.py so that we can use #ifdef JS_SIMULATOR in
    # the YAML file.
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])
    pp.out = six.StringIO()
    pp.do_filter('substitution')
    pp.do_include(yaml_path)
    contents = pp.out.getvalue()

    # Load into an OrderedDict to ensure order is preserved. Note: Python 3.7+
    # also preserves ordering for normal dictionaries.
    # Code based on https://stackoverflow.com/a/21912744.
    class OrderedLoader(yaml.Loader):
        pass

    def construct_mapping(loader, node):
        loader.flatten_mapping(node)
        return OrderedDict(loader.construct_pairs(node))

    tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
    OrderedLoader.add_constructor(tag, construct_mapping)
    return yaml.load(contents, OrderedLoader)
def generate_symbols_file(output, *args):
    '''
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('input')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    args = parser.parse_args(args)
    input = os.path.abspath(args.input)

    pp = Preprocessor()
    pp.context.update(buildconfig.defines)
    if args.D:
        pp.context.update(args.D)
    for undefine in args.U:
        if undefine in pp.context:
            del pp.context[undefine]
    # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
    if buildconfig.substs['MOZ_DEBUG']:
        pp.context['DEBUG'] = '1'
    # Ensure @DATA@ works as expected (see the Windows section further below)
    if buildconfig.substs['OS_TARGET'] == 'WINNT':
        pp.context['DATA'] = 'DATA'
    else:
        pp.context['DATA'] = ''
    pp.out = StringIO()
    pp.do_filter('substitution')
    pp.do_include(input)

    symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]

    if buildconfig.substs['OS_TARGET'] == 'WINNT':
        # A def file is generated for MSVC link.exe that looks like the
        # following:
        # LIBRARY library.dll
        # EXPORTS
        #   symbol1
        #   symbol2
        #   ...
        #
        # link.exe however requires special markers for data symbols, so in
        # that case the symbols look like:
        #   data_symbol1 DATA
        #   data_symbol2 DATA
        #   ...
        #
        # In the input file, this is just annotated with the following syntax:
        #   data_symbol1 @DATA@
        #   data_symbol2 @DATA@
        #   ...
        # The DATA variable is "simply" expanded by the preprocessor, to
        # nothing on non-Windows, such that we only get the symbol name on
        # those platforms, and to DATA on Windows, so that the "DATA" part
        # is, in fact, part of the symbol name as far as the symbols variable
        # is concerned.
        libname, ext = os.path.splitext(os.path.basename(output.name))
        assert ext == '.def'
        output.write('LIBRARY %s\nEXPORTS\n %s\n'
                     % (libname, '\n '.join(symbols)))
    elif buildconfig.substs['GCC_USE_GNU_LD']:
        # A linker version script is generated for GNU LD that looks like the
        # following:
        # {
        # global:
        #   symbol1;
        #   symbol2;
        #   ...
        # local:
        #   *;
        # };
        output.write('{\nglobal:\n %s;\nlocal:\n *;\n};'
                     % ';\n '.join(symbols))
    elif buildconfig.substs['OS_TARGET'] == 'Darwin':
        # A list of symbols is generated for Apple ld that simply lists all
        # symbols, with an underscore prefix.
        output.write(''.join('_%s\n' % s for s in symbols))

    return set(pp.includes)
def _consume_jar_manifest(self, obj, defines):
    # Ideally, this would all be handled somehow in the emitter, but
    # this would require all the magic surrounding l10n and addons in
    # the recursive make backend to die, which is not going to happen
    # any time soon enough.
    # Notably missing:
    # - DEFINES from config/config.mk
    # - L10n support
    # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
    #   moz.build, but it doesn't matter in dist/bin.
    pp = Preprocessor()
    pp.context.update(defines)
    pp.context.update(self.environment.defines)
    pp.context.update(
        AB_CD='en-US',
        BUILD_FASTER=1,
    )
    pp.out = JarManifestParser()
    pp.do_include(obj.path)
    self.backend_input_files |= pp.includes

    for jarinfo in pp.out:
        install_target = obj.install_target
        if jarinfo.base:
            install_target = mozpath.normpath(
                mozpath.join(install_target, jarinfo.base))
        for e in jarinfo.entries:
            if e.is_locale:
                if jarinfo.relativesrcdir:
                    path = mozpath.join(self.environment.topsrcdir,
                                        jarinfo.relativesrcdir)
                else:
                    path = mozpath.dirname(obj.path)
                src = mozpath.join(path, 'en-US', e.source)
            elif e.source.startswith('/'):
                src = mozpath.join(self.environment.topsrcdir,
                                   e.source[1:])
            else:
                src = mozpath.join(mozpath.dirname(obj.path), e.source)

            if '*' in e.source:
                if e.preprocess:
                    raise Exception('%s: Wildcards are not supported with '
                                    'preprocessing' % obj.path)

                def _prefix(s):
                    for p in s.split('/'):
                        if '*' not in p:
                            yield p + '/'
                prefix = ''.join(_prefix(src))

                self._install_manifests[install_target] \
                    .add_pattern_symlink(
                        prefix,
                        src[len(prefix):],
                        mozpath.join(jarinfo.name, e.output))
                continue

            if not os.path.exists(src):
                if e.is_locale:
                    raise Exception(
                        '%s: Cannot find %s' % (obj.path, e.source))
                if e.source.startswith('/'):
                    src = mozpath.join(self.environment.topobjdir,
                                       e.source[1:])
                else:
                    # This actually gets awkward if the jar.mn is not
                    # in the same directory as the moz.build declaring
                    # it, but it's how it works in the recursive make,
                    # not that anything relies on that, but it's simpler.
                    src = mozpath.join(obj.objdir, e.source)
                self._dependencies['install-%s' % install_target] \
                    .append(mozpath.relpath(
                        src, self.environment.topobjdir))

            if e.preprocess:
                kwargs = {}
                if src.endswith('.css'):
                    kwargs['marker'] = '%'
                self._add_preprocess(
                    obj,
                    src,
                    mozpath.join(jarinfo.name, mozpath.dirname(e.output)),
                    mozpath.basename(e.output),
                    defines=defines,
                    **kwargs)
            else:
                self._install_manifests[install_target].add_symlink(
                    src,
                    mozpath.join(jarinfo.name, e.output))

        manifest = mozpath.normpath(mozpath.join(install_target,
                                                 jarinfo.name))
        manifest += '.manifest'
        for m in jarinfo.chrome_manifests:
            self._manifest_entries[manifest].add(
                m.replace('%', mozpath.basename(jarinfo.name) + '/'))

        if jarinfo.name != 'chrome':
            manifest = mozpath.normpath(mozpath.join(install_target,
                                                     'chrome.manifest'))
            entry = 'manifest %s.manifest' % jarinfo.name
            self._manifest_entries[manifest].add(entry)
class TestPreprocessor(unittest.TestCase): """ Unit tests for the Context class """ def setUp(self): self.pp = Preprocessor() self.pp.out = StringIO() def do_include_compare(self, content_lines, expected_lines): content = "%s" % "\n".join(content_lines) expected = "%s".rstrip() % "\n".join(expected_lines) with MockedOpen({"dummy": content}): self.pp.do_include("dummy") self.assertEqual(self.pp.out.getvalue().rstrip("\n"), expected) def do_include_pass(self, content_lines): self.do_include_compare(content_lines, ["PASS"]) def test_conditional_if_0(self): self.do_include_pass([ "#if 0", "FAIL", "#else", "PASS", "#endif", ]) def test_no_marker(self): lines = [ "#if 0", "PASS", "#endif", ] self.pp.setMarker(None) self.do_include_compare(lines, lines) def test_string_value(self): self.do_include_compare( [ "#define FOO STRING", "#if FOO", "string value is true", "#else", "string value is false", "#endif", ], ["string value is false"], ) def test_number_value(self): self.do_include_compare( [ "#define FOO 1", "#if FOO", "number value is true", "#else", "number value is false", "#endif", ], ["number value is true"], ) def test_conditional_if_0_elif_1(self): self.do_include_pass([ "#if 0", "#elif 1", "PASS", "#else", "FAIL", "#endif", ]) def test_conditional_if_1(self): self.do_include_pass([ "#if 1", "PASS", "#else", "FAIL", "#endif", ]) def test_conditional_if_0_or_1(self): self.do_include_pass([ "#if 0 || 1", "PASS", "#else", "FAIL", "#endif", ]) def test_conditional_if_1_elif_1_else(self): self.do_include_pass([ "#if 1", "PASS", "#elif 1", "FAIL", "#else", "FAIL", "#endif", ]) def test_conditional_if_1_if_1(self): self.do_include_pass([ "#if 1", "#if 1", "PASS", "#else", "FAIL", "#endif", "#else", "FAIL", "#endif", ]) def test_conditional_not_0(self): self.do_include_pass([ "#if !0", "PASS", "#else", "FAIL", "#endif", ]) def test_conditional_not_0_and_1(self): self.do_include_pass([ "#if !0 && !1", "FAIL", "#else", "PASS", "#endif", ]) def test_conditional_not_1(self): self.do_include_pass([ "#if !1", "FAIL", "#else", "PASS", "#endif", ]) def test_conditional_not_emptyval(self): self.do_include_compare( [ "#define EMPTYVAL", "#ifndef EMPTYVAL", "FAIL", "#else", "PASS", "#endif", "#ifdef EMPTYVAL", "PASS", "#else", "FAIL", "#endif", ], ["PASS", "PASS"], ) def test_conditional_not_nullval(self): self.do_include_pass([ "#define NULLVAL 0", "#if !NULLVAL", "PASS", "#else", "FAIL", "#endif", ]) def test_indentation(self): self.do_include_pass([ " #define NULLVAL 0", " #if !NULLVAL", "PASS", " #else", "FAIL", " #endif", ]) def test_expand(self): self.do_include_pass([ "#define ASVAR AS", "#expand P__ASVAR__S", ]) def test_undef_defined(self): self.do_include_compare( [ "#define BAR", "#undef BAR", "BAR", ], ["BAR"], ) def test_undef_undefined(self): self.do_include_compare( [ "#undef BAR", ], [], ) def test_filter_attemptSubstitution(self): self.do_include_compare( [ "#filter attemptSubstitution", "@PASS@", "#unfilter attemptSubstitution", ], ["@PASS@"], ) def test_filter_emptyLines(self): self.do_include_compare( [ "lines with a", "", "blank line", "#filter emptyLines", "lines with", "", "no blank lines", "#unfilter emptyLines", "yet more lines with", "", "blank lines", ], [ "lines with a", "", "blank line", "lines with", "no blank lines", "yet more lines with", "", "blank lines", ], ) def test_filter_dumbComments(self): self.do_include_compare( [ "#filter dumbComments", "PASS//PASS // PASS", " //FAIL", "// FAIL", "PASS //", "PASS // FAIL", "//", "", "#unfilter dumbComments", "// PASS", ], [ 
"PASS//PASS // PASS", "", "", "PASS //", "PASS // FAIL", "", "", "// PASS", ], ) def test_filter_dumbComments_and_emptyLines(self): self.do_include_compare( [ "#filter dumbComments emptyLines", "PASS//PASS // PASS", " //FAIL", "// FAIL", "PASS //", "PASS // FAIL", "//", "", "#unfilter dumbComments emptyLines", "", "// PASS", ], [ "PASS//PASS // PASS", "PASS //", "PASS // FAIL", "", "// PASS", ], ) def test_filter_substitution(self): self.do_include_pass([ "#define VAR ASS", "#filter substitution", "P@VAR@", "#unfilter substitution", ]) def test_error(self): with MockedOpen({"f": "#error spit this message out\n"}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include("f") self.assertEqual(e.args[0][-1], "spit this message out") def test_javascript_line(self): # The preprocessor is reading the filename from somewhere not caught # by MockedOpen. tmpdir = mkdtemp() try: full = os.path.join(tmpdir, "javascript_line.js.in") with open(full, "w") as fh: fh.write("\n".join([ "// Line 1", "#if 0", "// line 3", "#endif", "// line 5", "# comment", "// line 7", "// line 8", "// line 9", "# another comment", "// line 11", "#define LINE 1", "// line 13, given line number overwritten with 2", "", ])) self.pp.do_include(full) out = "\n".join([ "// Line 1", '//@line 5 "CWDjavascript_line.js.in"', "// line 5", '//@line 7 "CWDjavascript_line.js.in"', "// line 7", "// line 8", "// line 9", '//@line 11 "CWDjavascript_line.js.in"', "// line 11", '//@line 2 "CWDjavascript_line.js.in"', "// line 13, given line number overwritten with 2", "", ]) out = out.replace("CWD", tmpdir + os.path.sep) self.assertEqual(self.pp.out.getvalue(), out) finally: shutil.rmtree(tmpdir) def test_literal(self): self.do_include_pass([ "#literal PASS", ]) def test_var_directory(self): self.do_include_pass([ "#ifdef DIRECTORY", "PASS", "#else", "FAIL", "#endif", ]) def test_var_file(self): self.do_include_pass([ "#ifdef FILE", "PASS", "#else", "FAIL", "#endif", ]) def test_var_if_0(self): self.do_include_pass([ "#define VAR 0", "#if VAR", "FAIL", "#else", "PASS", "#endif", ]) def test_var_if_0_elifdef(self): self.do_include_pass([ "#if 0", "#elifdef FILE", "PASS", "#else", "FAIL", "#endif", ]) def test_var_if_0_elifndef(self): self.do_include_pass([ "#if 0", "#elifndef VAR", "PASS", "#else", "FAIL", "#endif", ]) def test_var_ifdef_0(self): self.do_include_pass([ "#define VAR 0", "#ifdef VAR", "PASS", "#else", "FAIL", "#endif", ]) def test_var_ifdef_1_or_undef(self): self.do_include_pass([ "#define FOO 1", "#if defined(FOO) || defined(BAR)", "PASS", "#else", "FAIL", "#endif", ]) def test_var_ifdef_undef(self): self.do_include_pass([ "#define VAR 0", "#undef VAR", "#ifdef VAR", "FAIL", "#else", "PASS", "#endif", ]) def test_var_ifndef_0(self): self.do_include_pass([ "#define VAR 0", "#ifndef VAR", "FAIL", "#else", "PASS", "#endif", ]) def test_var_ifndef_0_and_undef(self): self.do_include_pass([ "#define FOO 0", "#if !defined(FOO) && !defined(BAR)", "FAIL", "#else", "PASS", "#endif", ]) def test_var_ifndef_undef(self): self.do_include_pass([ "#define VAR 0", "#undef VAR", "#ifndef VAR", "PASS", "#else", "FAIL", "#endif", ]) def test_var_line(self): self.do_include_pass([ "#ifdef LINE", "PASS", "#else", "FAIL", "#endif", ]) def test_filterDefine(self): self.do_include_pass([ "#filter substitution", "#define VAR AS", "#define VAR2 P@VAR@", "@VAR2@S", ]) def test_number_value_equals(self): self.do_include_pass([ "#define FOO 1000", "#if FOO == 1000", "PASS", "#else", "FAIL", "#endif", ]) def test_default_defines(self): 
self.pp.handleCommandLine(["-DFOO"]) self.do_include_pass([ "#if FOO == 1", "PASS", "#else", "FAIL", ]) def test_number_value_equals_defines(self): self.pp.handleCommandLine(["-DFOO=1000"]) self.do_include_pass([ "#if FOO == 1000", "PASS", "#else", "FAIL", ]) def test_octal_value_equals(self): self.do_include_pass([ "#define FOO 0100", "#if FOO == 0100", "PASS", "#else", "FAIL", "#endif", ]) def test_octal_value_equals_defines(self): self.pp.handleCommandLine(["-DFOO=0100"]) self.do_include_pass([ "#if FOO == 0100", "PASS", "#else", "FAIL", "#endif", ]) def test_value_quoted_expansion(self): """ Quoted values on the commandline don't currently have quotes stripped. Pike says this is for compat reasons. """ self.pp.handleCommandLine(['-DFOO="ABCD"']) self.do_include_compare( [ "#filter substitution", "@FOO@", ], ['"ABCD"'], ) def test_octal_value_quoted_expansion(self): self.pp.handleCommandLine(['-DFOO="0100"']) self.do_include_compare( [ "#filter substitution", "@FOO@", ], ['"0100"'], ) def test_number_value_not_equals_quoted_defines(self): self.pp.handleCommandLine(['-DFOO="1000"']) self.do_include_pass([ "#if FOO == 1000", "FAIL", "#else", "PASS", "#endif", ]) def test_octal_value_not_equals_quoted_defines(self): self.pp.handleCommandLine(['-DFOO="0100"']) self.do_include_pass([ "#if FOO == 0100", "FAIL", "#else", "PASS", "#endif", ]) def test_undefined_variable(self): with MockedOpen({"f": "#filter substitution\n@foo@"}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include("f") self.assertEqual(e.key, "UNDEFINED_VAR") def test_include(self): files = { "foo/test": "\n".join([ "#define foo foobarbaz", "#include @inc@", "@bar@", "", ]), "bar": "\n".join([ "#define bar barfoobaz", "@foo@", "", ]), "f": "\n".join([ "#filter substitution", "#define inc ../bar", "#include foo/test", "", ]), } with MockedOpen(files): self.pp.do_include("f") self.assertEqual(self.pp.out.getvalue(), "foobarbaz\nbarfoobaz\n") def test_include_line(self): files = { "srcdir/test.js": "\n".join([ "#define foo foobarbaz", "#include @inc@", "@bar@", "", ]), "srcdir/bar.js": "\n".join([ "#define bar barfoobaz", "@foo@", "", ]), "srcdir/foo.js": "\n".join([ "bazfoobar", "#include bar.js", "bazbarfoo", "", ]), "objdir/baz.js": "baz\n", "srcdir/f.js": "\n".join([ "#include foo.js", "#filter substitution", "#define inc bar.js", "#include test.js", "#include ../objdir/baz.js", "fin", "", ]), } preprocessed = ('//@line 1 "$SRCDIR/foo.js"\n' "bazfoobar\n" '//@line 2 "$SRCDIR/bar.js"\n' "@foo@\n" '//@line 3 "$SRCDIR/foo.js"\n' "bazbarfoo\n" '//@line 2 "$SRCDIR/bar.js"\n' "foobarbaz\n" '//@line 3 "$SRCDIR/test.js"\n' "barfoobaz\n" '//@line 1 "$OBJDIR/baz.js"\n' "baz\n" '//@line 6 "$SRCDIR/f.js"\n' "fin\n") # Try with separate srcdir/objdir with MockedOpen(files): self.pp.topsrcdir = os.path.abspath("srcdir") self.pp.topobjdir = os.path.abspath("objdir") self.pp.do_include("srcdir/f.js") self.assertEqual(self.pp.out.getvalue(), preprocessed) # Try again with relative objdir self.setUp() files["srcdir/objdir/baz.js"] = files["objdir/baz.js"] del files["objdir/baz.js"] files["srcdir/f.js"] = files["srcdir/f.js"].replace("../", "") with MockedOpen(files): self.pp.topsrcdir = os.path.abspath("srcdir") self.pp.topobjdir = os.path.abspath("srcdir/objdir") self.pp.do_include("srcdir/f.js") self.assertEqual(self.pp.out.getvalue(), preprocessed) def test_include_missing_file(self): with MockedOpen({"f": "#include foo\n"}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include("f") 
self.assertEqual(e.exception.key, "FILE_NOT_FOUND") def test_include_undefined_variable(self): with MockedOpen({"f": "#filter substitution\n#include @foo@\n"}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include("f") self.assertEqual(e.exception.key, "UNDEFINED_VAR") def test_include_literal_at(self): files = { "@foo@": "#define foo foobarbaz\n", "f": "#include @foo@\n#filter substitution\n@foo@\n", } with MockedOpen(files): self.pp.do_include("f") self.assertEqual(self.pp.out.getvalue(), "foobarbaz\n") def test_command_line_literal_at(self): with MockedOpen({"@[email protected]": "@foo@\n"}): self.pp.handleCommandLine( ["-Fsubstitution", "-Dfoo=foobarbaz", "@[email protected]"]) self.assertEqual(self.pp.out.getvalue(), "foobarbaz\n") def test_invalid_ifdef(self): with MockedOpen({"dummy": "#ifdef FOO == BAR\nPASS\n#endif"}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include("dummy") self.assertEqual(e.exception.key, "INVALID_VAR") with MockedOpen({"dummy": "#ifndef FOO == BAR\nPASS\n#endif"}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include("dummy") self.assertEqual(e.exception.key, "INVALID_VAR") # Trailing whitespaces, while not nice, shouldn't be an error. self.do_include_pass([ "#ifndef FOO ", "PASS", "#endif", ])
def parse_defines(paths):
    pp = Preprocessor()
    for path in paths:
        pp.do_include(path)
    return pp.context
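# A small sketch of how parse_defines() is meant to be fed (assuming the
# mozbuild Preprocessor): files containing only #define lines are consumed as
# directives, so each definition simply accumulates in pp.context, which is
# what gets returned. The file contents and define names here are made up.
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.defs', delete=False) as f:
    f.write('#define MOZ_APP_NAME firefox\n'
            '#define NIGHTLY_BUILD 1\n')

defines = parse_defines([f.name])
print(defines['MOZ_APP_NAME'])   # firefox
print(defines['NIGHTLY_BUILD'])  # 1 (numeric-looking values may be stored as ints)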
class JarMaker(object): '''JarMaker reads jar.mn files and process those into jar files or flat directories, along with chrome.manifest files. ''' ignore = re.compile('\s*(\#.*)?$') jarline = re.compile('(?:(?P<jarfile>[\w\d.\-\_\\\/{}]+).jar\:)|(?:\s*(\#.*)?)\s*$') relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):') regline = re.compile('\%\s+(.*)$') entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+' entryline = re.compile(entryre + '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$' ) def __init__(self, outputFormat='flat', useJarfileManifest=True, useChromeManifest=False): self.outputFormat = outputFormat self.useJarfileManifest = useJarfileManifest self.useChromeManifest = useChromeManifest self.pp = Preprocessor() self.topsourcedir = None self.sourcedirs = [] self.localedirs = None self.l10nbase = None self.l10nmerge = None self.relativesrcdir = None self.rootManifestAppId = None def getCommandLineParser(self): '''Get a optparse.OptionParser for jarmaker. This OptionParser has the options for jarmaker as well as the options for the inner PreProcessor. ''' # HACK, we need to unescape the string variables we get, # the perl versions didn't grok strings right p = self.pp.getCommandLineParser(unescapeDefines=True) p.add_option('-f', type='choice', default='jar', choices=('jar', 'flat', 'symlink'), help='fileformat used for output', metavar='[jar, flat, symlink]', ) p.add_option('-v', action='store_true', dest='verbose', help='verbose output') p.add_option('-q', action='store_false', dest='verbose', help='verbose output') p.add_option('-e', action='store_true', help='create chrome.manifest instead of jarfile.manifest' ) p.add_option('-s', type='string', action='append', default=[], help='source directory') p.add_option('-t', type='string', help='top source directory') p.add_option('-c', '--l10n-src', type='string', action='append' , help='localization directory') p.add_option('--l10n-base', type='string', action='store', help='base directory to be used for localization (requires relativesrcdir)' ) p.add_option('--locale-mergedir', type='string', action='store' , help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)' ) p.add_option('--relativesrcdir', type='string', help='relativesrcdir to be used for localization') p.add_option('-j', type='string', help='jarfile directory') p.add_option('--root-manifest-entry-appid', type='string', help='add an app id specific root chrome manifest entry.' ) return p def processIncludes(self, includes): '''Process given includes with the inner PreProcessor. Only use this for #defines, the includes shouldn't generate content. ''' self.pp.out = StringIO() for inc in includes: self.pp.do_include(inc) includesvalue = self.pp.out.getvalue() if includesvalue: logging.info('WARNING: Includes produce non-empty output') self.pp.out = None def finalizeJar(self, jarPath, chromebasepath, register, doZip=True): '''Helper method to write out the chrome registration entries to jarfile.manifest or chrome.manifest, or both. The actual file processing is done in updateManifest. 
''' # rewrite the manifest, if entries given if not register: return chromeManifest = os.path.join(os.path.dirname(jarPath), '..', 'chrome.manifest') if self.useJarfileManifest: self.updateManifest(jarPath + '.manifest', chromebasepath.format(''), register) addEntriesToListFile(chromeManifest, ['manifest chrome/{0}.manifest'.format(os.path.basename(jarPath))]) if self.useChromeManifest: self.updateManifest(chromeManifest, chromebasepath.format('chrome/'), register) # If requested, add a root chrome manifest entry (assumed to be in the parent directory # of chromeManifest) with the application specific id. In cases where we're building # lang packs, the root manifest must know about application sub directories. if self.rootManifestAppId: rootChromeManifest = \ os.path.join(os.path.normpath(os.path.dirname(chromeManifest)), '..', 'chrome.manifest') rootChromeManifest = os.path.normpath(rootChromeManifest) chromeDir = \ os.path.basename(os.path.dirname(os.path.normpath(chromeManifest))) logging.info("adding '%s' entry to root chrome manifest appid=%s" % (chromeDir, self.rootManifestAppId)) addEntriesToListFile(rootChromeManifest, ['manifest %s/chrome.manifest application=%s' % (chromeDir, self.rootManifestAppId)]) def updateManifest(self, manifestPath, chromebasepath, register): '''updateManifest replaces the % in the chrome registration entries with the given chrome base path, and updates the given manifest file. ''' ensureParentDir(manifestPath) lock = lock_file(manifestPath + '.lck') try: myregister = dict.fromkeys(map(lambda s: s.replace('%', chromebasepath), register.iterkeys())) manifestExists = os.path.isfile(manifestPath) mode = manifestExists and 'r+b' or 'wb' mf = open(manifestPath, mode) if manifestExists: # import previous content into hash, ignoring empty ones and comments imf = re.compile('(#.*)?$') for l in re.split('[\r\n]+', mf.read()): if imf.match(l): continue myregister[l] = None mf.seek(0) for k in sorted(myregister.iterkeys()): mf.write(k + os.linesep) mf.close() finally: lock = None def makeJar(self, infile, jardir): '''makeJar is the main entry point to JarMaker. It takes the input file, the output directory, the source dirs and the top source dir as argument, and optionally the l10n dirs. 
''' # making paths absolute, guess srcdir if file and add to sourcedirs _normpath = lambda p: os.path.normpath(os.path.abspath(p)) self.topsourcedir = _normpath(self.topsourcedir) self.sourcedirs = [_normpath(p) for p in self.sourcedirs] if self.localedirs: self.localedirs = [_normpath(p) for p in self.localedirs] elif self.relativesrcdir: self.localedirs = \ self.generateLocaleDirs(self.relativesrcdir) if isinstance(infile, basestring): logging.info('processing ' + infile) self.sourcedirs.append(_normpath(os.path.dirname(infile))) pp = self.pp.clone() pp.out = StringIO() pp.do_include(infile) lines = PushbackIter(pp.out.getvalue().splitlines()) try: while True: l = lines.next() m = self.jarline.match(l) if not m: raise RuntimeError(l) if m.group('jarfile') is None: # comment continue self.processJarSection(m.group('jarfile'), lines, jardir) except StopIteration: # we read the file pass return def generateLocaleDirs(self, relativesrcdir): if os.path.basename(relativesrcdir) == 'locales': # strip locales l10nrelsrcdir = os.path.dirname(relativesrcdir) else: l10nrelsrcdir = relativesrcdir locdirs = [] # generate locales dirs, merge, l10nbase, en-US if self.l10nmerge: locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir)) if self.l10nbase: locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) if self.l10nmerge or not self.l10nbase: # add en-US if we merge, or if it's not l10n locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, 'en-US')) return locdirs def processJarSection(self, jarfile, lines, jardir): '''Internal method called by makeJar to actually process a section of a jar.mn file. jarfile is the basename of the jarfile or the directory name for flat output, lines is a PushbackIter of the lines of jar.mn, the remaining options are carried over from makeJar. ''' # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = '{0}' + os.path.basename(jarfile) if self.outputFormat == 'jar': chromebasepath = 'jar:' + chromebasepath + '.jar!' chromebasepath += '/' jarfile = os.path.join(jardir, jarfile) jf = None if self.outputFormat == 'jar': # jar jarfilepath = jarfile + '.jar' try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, 'a', lock=True) outHelper = self.OutputHelper_jar(jf) else:
def _consume_jar_manifest(self, obj, defines):
    # Ideally, this would all be handled somehow in the emitter, but
    # this would require all the magic surrounding l10n and addons in
    # the recursive make backend to die, which is not going to happen
    # any time soon enough.
    # Notably missing:
    # - DEFINES from config/config.mk
    # - L10n support
    # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
    #   moz.build, but it doesn't matter in dist/bin.
    pp = Preprocessor()
    pp.context.update(defines)
    pp.context.update(self.environment.defines)
    pp.context.update(
        AB_CD='en-US',
        BUILD_FASTER=1,
    )
    pp.out = JarManifestParser()
    pp.do_include(obj.path)

    for jarinfo in pp.out:
        install_target = obj.install_target
        # Bug 1150417 added some gross hacks, which we don't try to
        # support generically. Fortunately, the hacks don't define more
        # than chrome manifest entries, so just assume we don't get
        # any installation entries.
        if jarinfo.name.startswith('../'):
            assert not jarinfo.entries

        base = mozpath.join('chrome', jarinfo.name)

        for e in jarinfo.entries:
            if e.is_locale:
                src = mozpath.join(
                    jarinfo.relativesrcdir or mozpath.dirname(obj.path),
                    'en-US',
                    e.source)
            elif e.source.startswith('/'):
                src = mozpath.join(self.environment.topsrcdir,
                                   e.source[1:])
            else:
                src = mozpath.join(mozpath.dirname(obj.path), e.source)

            if '*' in e.source:
                if e.preprocess:
                    raise Exception('%s: Wildcards are not supported with '
                                    'preprocessing' % obj.path)

                def _prefix(s):
                    for p in s.split('/'):
                        if '*' not in p:
                            yield p + '/'
                prefix = ''.join(_prefix(src))

                self._install_manifests[obj.install_target] \
                    .add_pattern_symlink(
                        prefix,
                        src[len(prefix):],
                        mozpath.join(base, e.output))
                continue

            if not os.path.exists(src):
                if e.is_locale:
                    raise Exception(
                        '%s: Cannot find %s' % (obj.path, e.source))
                if e.source.startswith('/'):
                    src = mozpath.join(self.environment.topobjdir,
                                       e.source[1:])
                else:
                    # This actually gets awkward if the jar.mn is not
                    # in the same directory as the moz.build declaring
                    # it, but it's how it works in the recursive make,
                    # not that anything relies on that, but it's simpler.
                    src = mozpath.join(obj.objdir, e.source)
                self._dependencies['install-%s' % obj.install_target] \
                    .append(mozpath.relpath(
                        src, self.environment.topobjdir))

            if e.preprocess:
                kwargs = {}
                if src.endswith('.css'):
                    kwargs['marker'] = '%'
                self._add_preprocess(
                    obj,
                    src,
                    mozpath.join(base, mozpath.dirname(e.output)),
                    mozpath.basename(e.output),
                    defines=defines,
                    **kwargs)
            else:
                self._install_manifests[obj.install_target].add_symlink(
                    src,
                    mozpath.join(base, e.output))

        manifest = mozpath.normpath(mozpath.join(obj.install_target, base))
        manifest += '.manifest'
        for m in jarinfo.chrome_manifests:
            self._manifest_entries[manifest].append(
                m.replace('%', jarinfo.name + '/'))

        # ../ special cased for bug 1150417 again.
        if not jarinfo.name.startswith('../'):
            manifest = mozpath.normpath(mozpath.join(obj.install_target,
                                                     'chrome.manifest'))
            entry = 'manifest %s.manifest' % base
            if entry not in self._manifest_entries[manifest]:
                self._manifest_entries[manifest].append(entry)
class JarMaker(object): '''JarMaker reads jar.mn files and process those into jar files or flat directories, along with chrome.manifest files. ''' def __init__(self, outputFormat='flat', useJarfileManifest=True, useChromeManifest=False): self.outputFormat = outputFormat self.useJarfileManifest = useJarfileManifest self.useChromeManifest = useChromeManifest self.pp = Preprocessor() self.topsourcedir = None self.sourcedirs = [] self.localedirs = None self.l10nbase = None self.l10nmerge = None self.relativesrcdir = None self.rootManifestAppId = None def getCommandLineParser(self): '''Get a optparse.OptionParser for jarmaker. This OptionParser has the options for jarmaker as well as the options for the inner PreProcessor. ''' # HACK, we need to unescape the string variables we get, # the perl versions didn't grok strings right p = self.pp.getCommandLineParser(unescapeDefines=True) p.add_option( '-f', type='choice', default='jar', choices=('jar', 'flat', 'symlink'), help='fileformat used for output', metavar='[jar, flat, symlink]', ) p.add_option('-v', action='store_true', dest='verbose', help='verbose output') p.add_option('-q', action='store_false', dest='verbose', help='verbose output') p.add_option('-e', action='store_true', help='create chrome.manifest instead of jarfile.manifest') p.add_option('-s', type='string', action='append', default=[], help='source directory') p.add_option('-t', type='string', help='top source directory') p.add_option('-c', '--l10n-src', type='string', action='append', help='localization directory') p.add_option( '--l10n-base', type='string', action='store', help= 'base directory to be used for localization (requires relativesrcdir)' ) p.add_option( '--locale-mergedir', type='string', action='store', help= 'base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)' ) p.add_option('--relativesrcdir', type='string', help='relativesrcdir to be used for localization') p.add_option('-d', type='string', help='base directory') p.add_option('--root-manifest-entry-appid', type='string', help='add an app id specific root chrome manifest entry.') return p def processIncludes(self, includes): '''Process given includes with the inner PreProcessor. Only use this for #defines, the includes shouldn't generate content. ''' self.pp.out = StringIO() for inc in includes: self.pp.do_include(inc) includesvalue = self.pp.out.getvalue() if includesvalue: logging.info('WARNING: Includes produce non-empty output') self.pp.out = None def finalizeJar(self, jardir, jarbase, jarname, chromebasepath, register, doZip=True): '''Helper method to write out the chrome registration entries to jarfile.manifest or chrome.manifest, or both. The actual file processing is done in updateManifest. ''' # rewrite the manifest, if entries given if not register: return chromeManifest = os.path.join(jardir, jarbase, 'chrome.manifest') if self.useJarfileManifest: self.updateManifest( os.path.join(jardir, jarbase, jarname + '.manifest'), chromebasepath.format(''), register) if jarname != 'chrome': addEntriesToListFile(chromeManifest, ['manifest {0}.manifest'.format(jarname)]) if self.useChromeManifest: chromebase = os.path.dirname(jarname) + '/' self.updateManifest(chromeManifest, chromebasepath.format(chromebase), register) # If requested, add a root chrome manifest entry (assumed to be in the parent directory # of chromeManifest) with the application specific id. In cases where we're building # lang packs, the root manifest must know about application sub directories. 
if self.rootManifestAppId: rootChromeManifest = \ os.path.join(os.path.normpath(os.path.dirname(chromeManifest)), '..', 'chrome.manifest') rootChromeManifest = os.path.normpath(rootChromeManifest) chromeDir = \ os.path.basename(os.path.dirname(os.path.normpath(chromeManifest))) logging.info("adding '%s' entry to root chrome manifest appid=%s" % (chromeDir, self.rootManifestAppId)) addEntriesToListFile(rootChromeManifest, [ 'manifest %s/chrome.manifest application=%s' % (chromeDir, self.rootManifestAppId) ]) def updateManifest(self, manifestPath, chromebasepath, register): '''updateManifest replaces the % in the chrome registration entries with the given chrome base path, and updates the given manifest file. ''' myregister = dict.fromkeys( map(lambda s: s.replace('%', chromebasepath), register)) addEntriesToListFile(manifestPath, myregister.iterkeys()) def makeJar(self, infile, jardir): '''makeJar is the main entry point to JarMaker. It takes the input file, the output directory, the source dirs and the top source dir as argument, and optionally the l10n dirs. ''' # making paths absolute, guess srcdir if file and add to sourcedirs _normpath = lambda p: os.path.normpath(os.path.abspath(p)) self.topsourcedir = _normpath(self.topsourcedir) self.sourcedirs = [_normpath(p) for p in self.sourcedirs] if self.localedirs: self.localedirs = [_normpath(p) for p in self.localedirs] elif self.relativesrcdir: self.localedirs = \ self.generateLocaleDirs(self.relativesrcdir) if isinstance(infile, basestring): logging.info('processing ' + infile) self.sourcedirs.append(_normpath(os.path.dirname(infile))) pp = self.pp.clone() pp.out = JarManifestParser() pp.do_include(infile) for info in pp.out: self.processJarSection(info, jardir) def generateLocaleDirs(self, relativesrcdir): if os.path.basename(relativesrcdir) == 'locales': # strip locales l10nrelsrcdir = os.path.dirname(relativesrcdir) else: l10nrelsrcdir = relativesrcdir locdirs = [] # generate locales dirs, merge, l10nbase, en-US if self.l10nmerge: locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir)) if self.l10nbase: locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) if self.l10nmerge or not self.l10nbase: # add en-US if we merge, or if it's not l10n locdirs.append( os.path.join(self.topsourcedir, relativesrcdir, 'en-US')) return locdirs def processJarSection(self, jarinfo, jardir): '''Internal method called by makeJar to actually process a section of a jar.mn file. ''' # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = '{0}' + os.path.basename(jarinfo.name) if self.outputFormat == 'jar': chromebasepath = 'jar:' + chromebasepath + '.jar!' chromebasepath += '/' jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name) jf = None if self.outputFormat == 'jar': # jar jarfilepath = jarfile + '.jar' try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, 'a', lock=True) outHelper = self.OutputHelper_jar(jf) else:
def _consume_jar_manifest(self, obj): # Ideally, this would all be handled somehow in the emitter, but # this would require all the magic surrounding l10n and addons in # the recursive make backend to die, which is not going to happen # any time soon enough. # Notably missing: # - DEFINES from config/config.mk # - L10n support # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in # moz.build, but it doesn't matter in dist/bin. pp = Preprocessor() if obj.defines: pp.context.update(obj.defines.defines) pp.context.update(self.environment.defines) pp.context.update( AB_CD='en-US', BUILD_FASTER=1, ) pp.out = JarManifestParser() try: pp.do_include(obj.path.full_path) except DeprecatedJarManifest as e: raise DeprecatedJarManifest('Parsing error while processing %s: %s' % (obj.path.full_path, e.message)) self.backend_input_files |= pp.includes for jarinfo in pp.out: jar_context = Context( allowed_variables=VARIABLES, config=obj._context.config) jar_context.push_source(obj._context.main_path) jar_context.push_source(obj.path.full_path) install_target = obj.install_target if jarinfo.base: install_target = mozpath.normpath( mozpath.join(install_target, jarinfo.base)) jar_context['FINAL_TARGET'] = install_target if obj.defines: jar_context['DEFINES'] = obj.defines.defines files = jar_context['FINAL_TARGET_FILES'] files_pp = jar_context['FINAL_TARGET_PP_FILES'] for e in jarinfo.entries: if e.is_locale: if jarinfo.relativesrcdir: src = '/%s' % jarinfo.relativesrcdir else: src = '' src = mozpath.join(src, 'en-US', e.source) else: src = e.source src = Path(jar_context, src) if '*' not in e.source and not os.path.exists(src.full_path): if e.is_locale: raise Exception( '%s: Cannot find %s' % (obj.path, e.source)) if e.source.startswith('/'): src = Path(jar_context, '!' + e.source) else: # This actually gets awkward if the jar.mn is not # in the same directory as the moz.build declaring # it, but it's how it works in the recursive make, # not that anything relies on that, but it's simpler. src = Path(obj._context, '!' + e.source) output_basename = mozpath.basename(e.output) if output_basename != src.target_basename: src = RenamedSourcePath(jar_context, (src, output_basename)) path = mozpath.dirname(mozpath.join(jarinfo.name, e.output)) if e.preprocess: if '*' in e.source: raise Exception('%s: Wildcards are not supported with ' 'preprocessing' % obj.path) files_pp[path] += [src] else: files[path] += [src] if files: self.consume_object(FinalTargetFiles(jar_context, files)) if files_pp: self.consume_object( FinalTargetPreprocessedFiles(jar_context, files_pp)) for m in jarinfo.chrome_manifests: entry = parse_manifest_line( mozpath.dirname(jarinfo.name), m.replace('%', mozpath.basename(jarinfo.name) + '/')) self.consume_object(ChromeManifestEntry( jar_context, '%s.manifest' % jarinfo.name, entry))
def generate_symbols_file(output, *args):
    '''
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('input')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    args = parser.parse_args(args)
    input = os.path.abspath(args.input)

    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])
    if args.D:
        pp.context.update(args.D)
    for undefine in args.U:
        if undefine in pp.context:
            del pp.context[undefine]
    # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'
    # Ensure @DATA@ works as expected (see the Windows section further below)
    if buildconfig.substs['OS_TARGET'] == 'WINNT':
        pp.context['DATA'] = 'DATA'
    else:
        pp.context['DATA'] = ''
    pp.out = StringIO()
    pp.do_filter('substitution')
    pp.do_include(input)

    symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]

    if buildconfig.substs['OS_TARGET'] == 'WINNT':
        # A def file is generated for MSVC link.exe that looks like the
        # following:
        # LIBRARY library.dll
        # EXPORTS
        #   symbol1
        #   symbol2
        #   ...
        #
        # link.exe however requires special markers for data symbols, so in
        # that case the symbols look like:
        #   data_symbol1 DATA
        #   data_symbol2 DATA
        #   ...
        #
        # In the input file, this is just annotated with the following syntax:
        #   data_symbol1 @DATA@
        #   data_symbol2 @DATA@
        #   ...
        # The DATA variable is "simply" expanded by the preprocessor, to
        # nothing on non-Windows, such that we only get the symbol name on
        # those platforms, and to DATA on Windows, so that the "DATA" part
        # is, in fact, part of the symbol name as far as the symbols variable
        # is concerned.
        libname, ext = os.path.splitext(os.path.basename(output.name))
        assert ext == '.def'
        output.write('LIBRARY %s\nEXPORTS\n %s\n'
                     % (libname, '\n '.join(symbols)))
    elif buildconfig.substs['GCC_USE_GNU_LD']:
        # A linker version script is generated for GNU LD that looks like the
        # following:
        # {
        # global:
        #   symbol1;
        #   symbol2;
        #   ...
        # local:
        #   *;
        # };
        output.write('{\nglobal:\n %s;\nlocal:\n *;\n};'
                     % ';\n '.join(symbols))
    elif buildconfig.substs['OS_TARGET'] == 'Darwin':
        # A list of symbols is generated for Apple ld that simply lists all
        # symbols, with an underscore prefix.
        output.write(''.join('_%s\n' % s for s in symbols))

    return set(pp.includes)
def _consume_jar_manifest(self, obj): # Ideally, this would all be handled somehow in the emitter, but # this would require all the magic surrounding l10n and addons in # the recursive make backend to die, which is not going to happen # any time soon enough. # Notably missing: # - DEFINES from config/config.mk # - L10n support # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in # moz.build, but it doesn't matter in dist/bin. pp = Preprocessor() if obj.defines: pp.context.update(obj.defines.defines) pp.context.update(self.environment.defines) pp.context.update( AB_CD='en-US', BUILD_FASTER=1, ) pp.out = JarManifestParser() try: pp.do_include(obj.path.full_path) except DeprecatedJarManifest as e: raise DeprecatedJarManifest( 'Parsing error while processing %s: %s' % (obj.path.full_path, e.message)) self.backend_input_files |= pp.includes for jarinfo in pp.out: jar_context = Context(allowed_variables=VARIABLES, config=obj._context.config) jar_context.push_source(obj._context.main_path) jar_context.push_source(obj.path.full_path) install_target = obj.install_target if jarinfo.base: install_target = mozpath.normpath( mozpath.join(install_target, jarinfo.base)) jar_context['FINAL_TARGET'] = install_target if obj.defines: jar_context['DEFINES'] = obj.defines.defines files = jar_context['FINAL_TARGET_FILES'] files_pp = jar_context['FINAL_TARGET_PP_FILES'] for e in jarinfo.entries: if e.is_locale: if jarinfo.relativesrcdir: src = '/%s' % jarinfo.relativesrcdir else: src = '' src = mozpath.join(src, 'en-US', e.source) else: src = e.source src = Path(jar_context, src) if '*' not in e.source and not os.path.exists(src.full_path): if e.is_locale: raise Exception('%s: Cannot find %s' % (obj.path, e.source)) if e.source.startswith('/'): src = Path(jar_context, '!' + e.source) else: # This actually gets awkward if the jar.mn is not # in the same directory as the moz.build declaring # it, but it's how it works in the recursive make, # not that anything relies on that, but it's simpler. src = Path(obj._context, '!' + e.source) output_basename = mozpath.basename(e.output) if output_basename != src.target_basename: src = RenamedSourcePath(jar_context, (src, output_basename)) path = mozpath.dirname(mozpath.join(jarinfo.name, e.output)) if e.preprocess: if '*' in e.source: raise Exception('%s: Wildcards are not supported with ' 'preprocessing' % obj.path) files_pp[path] += [src] else: files[path] += [src] if files: self.consume_object(FinalTargetFiles(jar_context, files)) if files_pp: self.consume_object( FinalTargetPreprocessedFiles(jar_context, files_pp)) for m in jarinfo.chrome_manifests: entry = parse_manifest_line( mozpath.dirname(jarinfo.name), m.replace('%', mozpath.basename(jarinfo.name) + '/')) self.consume_object( ChromeManifestEntry(jar_context, '%s.manifest' % jarinfo.name, entry))
class JarMaker(object): '''JarMaker reads jar.mn files and process those into jar files or flat directories, along with chrome.manifest files. ''' def __init__(self, outputFormat='flat', useJarfileManifest=True, useChromeManifest=False): self.outputFormat = outputFormat self.useJarfileManifest = useJarfileManifest self.useChromeManifest = useChromeManifest self.pp = Preprocessor() self.topsourcedir = None self.sourcedirs = [] self.localedirs = None self.l10nbase = None self.l10nmerge = None self.relativesrcdir = None self.rootManifestAppId = None def getCommandLineParser(self): '''Get a optparse.OptionParser for jarmaker. This OptionParser has the options for jarmaker as well as the options for the inner PreProcessor. ''' # HACK, we need to unescape the string variables we get, # the perl versions didn't grok strings right p = self.pp.getCommandLineParser(unescapeDefines=True) p.add_option('-f', type='choice', default='jar', choices=('jar', 'flat', 'symlink'), help='fileformat used for output', metavar='[jar, flat, symlink]', ) p.add_option('-v', action='store_true', dest='verbose', help='verbose output') p.add_option('-q', action='store_false', dest='verbose', help='verbose output') p.add_option('-e', action='store_true', help='create chrome.manifest instead of jarfile.manifest' ) p.add_option('-s', type='string', action='append', default=[], help='source directory') p.add_option('-t', type='string', help='top source directory') p.add_option('-c', '--l10n-src', type='string', action='append' , help='localization directory') p.add_option('--l10n-base', type='string', action='store', help='base directory to be used for localization (requires relativesrcdir)' ) p.add_option('--locale-mergedir', type='string', action='store' , help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)' ) p.add_option('--relativesrcdir', type='string', help='relativesrcdir to be used for localization') p.add_option('-d', type='string', help='base directory') p.add_option('--root-manifest-entry-appid', type='string', help='add an app id specific root chrome manifest entry.' ) return p def processIncludes(self, includes): '''Process given includes with the inner PreProcessor. Only use this for #defines, the includes shouldn't generate content. ''' self.pp.out = StringIO() for inc in includes: self.pp.do_include(inc) includesvalue = self.pp.out.getvalue() if includesvalue: logging.info('WARNING: Includes produce non-empty output') self.pp.out = None def finalizeJar(self, jardir, jarbase, jarname, chromebasepath, register, doZip=True): '''Helper method to write out the chrome registration entries to jarfile.manifest or chrome.manifest, or both. The actual file processing is done in updateManifest. ''' # rewrite the manifest, if entries given if not register: return chromeManifest = os.path.join(jardir, jarbase, 'chrome.manifest') if self.useJarfileManifest: self.updateManifest(os.path.join(jardir, jarbase, jarname + '.manifest'), chromebasepath.format(''), register) if jarname != 'chrome': addEntriesToListFile(chromeManifest, ['manifest {0}.manifest'.format(jarname)]) if self.useChromeManifest: chromebase = os.path.dirname(jarname) + '/' self.updateManifest(chromeManifest, chromebasepath.format(chromebase), register) # If requested, add a root chrome manifest entry (assumed to be in the parent directory # of chromeManifest) with the application specific id. In cases where we're building # lang packs, the root manifest must know about application sub directories. 
if self.rootManifestAppId: rootChromeManifest = \ os.path.join(os.path.normpath(os.path.dirname(chromeManifest)), '..', 'chrome.manifest') rootChromeManifest = os.path.normpath(rootChromeManifest) chromeDir = \ os.path.basename(os.path.dirname(os.path.normpath(chromeManifest))) logging.info("adding '%s' entry to root chrome manifest appid=%s" % (chromeDir, self.rootManifestAppId)) addEntriesToListFile(rootChromeManifest, ['manifest %s/chrome.manifest application=%s' % (chromeDir, self.rootManifestAppId)]) def updateManifest(self, manifestPath, chromebasepath, register): '''updateManifest replaces the % in the chrome registration entries with the given chrome base path, and updates the given manifest file. ''' myregister = dict.fromkeys(map(lambda s: s.replace('%', chromebasepath), register)) addEntriesToListFile(manifestPath, myregister.iterkeys()) def makeJar(self, infile, jardir): '''makeJar is the main entry point to JarMaker. It takes the input file, the output directory, the source dirs and the top source dir as argument, and optionally the l10n dirs. ''' # making paths absolute, guess srcdir if file and add to sourcedirs _normpath = lambda p: os.path.normpath(os.path.abspath(p)) self.topsourcedir = _normpath(self.topsourcedir) self.sourcedirs = [_normpath(p) for p in self.sourcedirs] if self.localedirs: self.localedirs = [_normpath(p) for p in self.localedirs] elif self.relativesrcdir: self.localedirs = \ self.generateLocaleDirs(self.relativesrcdir) if isinstance(infile, basestring): logging.info('processing ' + infile) self.sourcedirs.append(_normpath(os.path.dirname(infile))) pp = self.pp.clone() pp.out = JarManifestParser() pp.do_include(infile) for info in pp.out: self.processJarSection(info, jardir) def generateLocaleDirs(self, relativesrcdir): if os.path.basename(relativesrcdir) == 'locales': # strip locales l10nrelsrcdir = os.path.dirname(relativesrcdir) else: l10nrelsrcdir = relativesrcdir locdirs = [] # generate locales dirs, merge, l10nbase, en-US if self.l10nmerge: locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir)) if self.l10nbase: locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) if self.l10nmerge or not self.l10nbase: # add en-US if we merge, or if it's not l10n locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, 'en-US')) return locdirs def processJarSection(self, jarinfo, jardir): '''Internal method called by makeJar to actually process a section of a jar.mn file. ''' # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = '{0}' + os.path.basename(jarinfo.name) if self.outputFormat == 'jar': chromebasepath = 'jar:' + chromebasepath + '.jar!' chromebasepath += '/' jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name) jf = None if self.outputFormat == 'jar': # jar jarfilepath = jarfile + '.jar' try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, 'a', lock=True) outHelper = self.OutputHelper_jar(jf) else:
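# A minimal programmatic usage sketch for the JarMaker class above (all paths
# are hypothetical). getCommandLineParser() normally fills these attributes
# from the command line; here they are set directly before calling makeJar().
jm = JarMaker(outputFormat='flat')             # 'jar', 'flat' or 'symlink'
jm.topsourcedir = '/src/mozilla'               # -t
jm.sourcedirs = ['/obj/browser/base']          # -s (may be repeated)
jm.relativesrcdir = 'browser/locales'          # --relativesrcdir, enables l10n lookups
jm.makeJar('jar.mn', '/obj/dist/bin/chrome')   # input manifest, output directory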
class TestPreprocessor(unittest.TestCase): """ Unit tests for the Context class """ def setUp(self): self.pp = Preprocessor() self.pp.out = StringIO() def test_conditional_if_0(self): f = NamedIO("conditional_if_0.in", """#if 0 FAIL #else PASS #endif """) self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_no_marker(self): no_marker = """#if 0 PASS #endif """ f = NamedIO("no_marker.in", no_marker) self.pp.setMarker(None) self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), no_marker) def test_string_value(self): f = NamedIO("string_value.in", """#define FOO STRING #if FOO string value is true #else string value is false #endif """) self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "string value is false\n") def test_number_value(self): f = NamedIO("string_value.in", """#define FOO 1 #if FOO number value is true #else number value is false #endif """) self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "number value is true\n") def test_conditional_if_0_elif_1(self): f = NamedIO('conditional_if_0_elif_1.in', '''#if 0 #elif 1 PASS #else FAIL #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_conditional_if_1(self): f = NamedIO('conditional_if_1.in', '''#if 1 PASS #else FAILE #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_conditional_if_1_elif_1_else(self): f = NamedIO('conditional_if_1_elif_1_else.in', '''#if 1 PASS #elif 1 FAIL #else FAIL #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_conditional_if_1_if_1(self): f = NamedIO('conditional_if_1_if_1.in', '''#if 1 #if 1 PASS #else FAIL #endif #else FAIL #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_conditional_not_0(self): f = NamedIO('conditional_not_0.in', '''#if !0 PASS #else FAIL #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_conditional_not_1(self): f = NamedIO('conditional_not_1.in', '''#if !1 FAIL #else PASS #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_conditional_not_emptyval(self): f = NamedIO('conditional_not_emptyval.in', '''#define EMPTYVAL #if !EMPTYVAL FAIL #else PASS #endif #if EMPTYVAL PASS #else FAIL #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\nPASS\n") def test_conditional_not_nullval(self): f = NamedIO('conditional_not_nullval.in', '''#define NULLVAL 0 #if !NULLVAL PASS #else FAIL #endif ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_expand(self): f = NamedIO('expand.in', '''#define ASVAR AS #expand P__ASVAR__S ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_undef_defined(self): f = NamedIO('undef_defined.in', '''#define BAR #undef BAR BAR ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "BAR\n") def test_undef_undefined(self): f = NamedIO('undef_undefined.in', '''#undef VAR ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "") def test_filter_attemptSubstitution(self): f = NamedIO('filter_attemptSubstitution.in', '''#filter attemptSubstitution @PASS@ #unfilter attemptSubstitution ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "@PASS@\n") def test_filter_emptyLines(self): f = NamedIO('filter_emptyLines.in', '''lines with a blank line #filter emptyLines lines with no blank lines #unfilter emptyLines yet more lines 
with blank lines ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), '''lines with a blank line lines with no blank lines yet more lines with blank lines ''') def test_filter_slashslash(self): f = NamedIO('filter_slashslash.in', '''#filter slashslash PASS//FAIL // FAIL #unfilter slashslash PASS // PASS ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\nPASS // PASS\n") def test_filter_spaces(self): f = NamedIO('filter_spaces.in', '''#filter spaces You should see two nice ascii tables +-+-+-+ | | | | +-+-+-+ #unfilter spaces +-+---+ | | | +-+---+ ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), """You should see two nice ascii tables +-+-+-+ | | | | +-+-+-+ +-+---+ | | | +-+---+ """) def test_filter_substitution(self): f = NamedIO('filter_substitution.in', '''#define VAR ASS #filter substitution P@VAR@ #unfilter substitution ''') self.pp.do_include(f) self.assertEqual(self.pp.out.getvalue(), "PASS\n") def test_error(self): f = NamedIO('error.in', '''#error spit this message out ''') caught_msg = None try: self.pp.do_include(f) except Preprocessor.Error, e: caught_msg = e.args[0][-1] self.assertEqual(caught_msg, 'spit this message out')
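# A standalone sketch of the Preprocessor behaviour exercised by the tests
# above, using a real temporary file instead of the NamedIO helper. The file
# name and contents are illustrative only.
import os
import tempfile
from io import StringIO                          # Python 2 era code used StringIO.StringIO
from mozbuild.preprocessor import Preprocessor   # older trees: config/Preprocessor.py

pp = Preprocessor()
pp.out = StringIO()
with tempfile.NamedTemporaryFile(mode='w', suffix='.in', delete=False) as fh:
    fh.write('#define VAR ASS\n'
             '#filter substitution\n'
             'P@VAR@\n'
             '#unfilter substitution\n')
    name = fh.name
try:
    pp.do_include(name)          # directives emit nothing; 'P@VAR@' becomes 'PASS'
    assert pp.out.getvalue() == 'PASS\n'
finally:
    os.remove(name)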
def generate_symbols_file(output, *args): """ """ parser = argparse.ArgumentParser() parser.add_argument("input") parser.add_argument("-D", action=DefinesAction) parser.add_argument("-U", action="append", default=[]) args = parser.parse_args(args) input = os.path.abspath(args.input) pp = Preprocessor() pp.context.update(buildconfig.defines["ALLDEFINES"]) if args.D: pp.context.update(args.D) for undefine in args.U: if undefine in pp.context: del pp.context[undefine] # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines if buildconfig.substs.get("MOZ_DEBUG"): pp.context["DEBUG"] = "1" # Ensure @DATA@ works as expected (see the Windows section further below) if buildconfig.substs["OS_TARGET"] == "WINNT": pp.context["DATA"] = "DATA" else: pp.context["DATA"] = "" pp.out = StringIO() pp.do_filter("substitution") pp.do_include(input) symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()] libname, ext = os.path.splitext(os.path.basename(output.name)) if buildconfig.substs["OS_TARGET"] == "WINNT": # A def file is generated for MSVC link.exe that looks like the # following: # LIBRARY library.dll # EXPORTS # symbol1 # symbol2 # ... # # link.exe however requires special markers for data symbols, so in # that case the symbols look like: # data_symbol1 DATA # data_symbol2 DATA # ... # # In the input file, this is just annotated with the following syntax: # data_symbol1 @DATA@ # data_symbol2 @DATA@ # ... # The DATA variable is "simply" expanded by the preprocessor, to # nothing on non-Windows, such that we only get the symbol name on # those platforms, and to DATA on Windows, so that the "DATA" part # is, in fact, part of the symbol name as far as the symbols variable # is concerned. assert ext == ".def" output.write("LIBRARY %s\nEXPORTS\n %s\n" % (libname, "\n ".join(symbols))) elif ( buildconfig.substs.get("GCC_USE_GNU_LD") or buildconfig.substs["OS_TARGET"] == "SunOS" ): # A linker version script is generated for GNU LD that looks like the # following: # liblibrary.so { # global: # symbol1; # symbol2; # ... # local: # *; # }; output.write( "%s {\nglobal:\n %s;\nlocal:\n *;\n};" % (libname, ";\n ".join(symbols)) ) elif buildconfig.substs["OS_TARGET"] == "Darwin": # A list of symbols is generated for Apple ld that simply lists all # symbols, with an underscore prefix. output.write("".join("_%s\n" % s for s in symbols)) return set(pp.includes)
class TestPreprocessor(unittest.TestCase): """ Unit tests for the Context class """ def setUp(self): self.pp = Preprocessor() self.pp.out = StringIO() def do_include_compare(self, content_lines, expected_lines): content = '%s' % '\n'.join(content_lines) expected = '%s'.rstrip() % '\n'.join(expected_lines) with MockedOpen({'dummy': content}): self.pp.do_include('dummy') self.assertEqual(self.pp.out.getvalue().rstrip('\n'), expected) def do_include_pass(self, content_lines): self.do_include_compare(content_lines, ['PASS']) def test_conditional_if_0(self): self.do_include_pass([ '#if 0', 'FAIL', '#else', 'PASS', '#endif', ]) def test_no_marker(self): lines = [ '#if 0', 'PASS', '#endif', ] self.pp.setMarker(None) self.do_include_compare(lines, lines) def test_string_value(self): self.do_include_compare([ '#define FOO STRING', '#if FOO', 'string value is true', '#else', 'string value is false', '#endif', ], ['string value is false']) def test_number_value(self): self.do_include_compare([ '#define FOO 1', '#if FOO', 'number value is true', '#else', 'number value is false', '#endif', ], ['number value is true']) def test_conditional_if_0_elif_1(self): self.do_include_pass([ '#if 0', '#elif 1', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_if_1(self): self.do_include_pass([ '#if 1', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_if_0_or_1(self): self.do_include_pass([ '#if 0 || 1', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_if_1_elif_1_else(self): self.do_include_pass([ '#if 1', 'PASS', '#elif 1', 'FAIL', '#else', 'FAIL', '#endif', ]) def test_conditional_if_1_if_1(self): self.do_include_pass([ '#if 1', '#if 1', 'PASS', '#else', 'FAIL', '#endif', '#else', 'FAIL', '#endif', ]) def test_conditional_not_0(self): self.do_include_pass([ '#if !0', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_not_0_and_1(self): self.do_include_pass([ '#if !0 && !1', 'FAIL', '#else', 'PASS', '#endif', ]) def test_conditional_not_1(self): self.do_include_pass([ '#if !1', 'FAIL', '#else', 'PASS', '#endif', ]) def test_conditional_not_emptyval(self): self.do_include_compare([ '#define EMPTYVAL', '#ifndef EMPTYVAL', 'FAIL', '#else', 'PASS', '#endif', '#ifdef EMPTYVAL', 'PASS', '#else', 'FAIL', '#endif', ], ['PASS', 'PASS']) def test_conditional_not_nullval(self): self.do_include_pass([ '#define NULLVAL 0', '#if !NULLVAL', 'PASS', '#else', 'FAIL', '#endif', ]) def test_expand(self): self.do_include_pass([ '#define ASVAR AS', '#expand P__ASVAR__S', ]) def test_undef_defined(self): self.do_include_compare([ '#define BAR', '#undef BAR', 'BAR', ], ['BAR']) def test_undef_undefined(self): self.do_include_compare([ '#undef BAR', ], []) def test_filter_attemptSubstitution(self): self.do_include_compare([ '#filter attemptSubstitution', '@PASS@', '#unfilter attemptSubstitution', ], ['@PASS@']) def test_filter_emptyLines(self): self.do_include_compare([ 'lines with a', '', 'blank line', '#filter emptyLines', 'lines with', '', 'no blank lines', '#unfilter emptyLines', 'yet more lines with', '', 'blank lines', ], [ 'lines with a', '', 'blank line', 'lines with', 'no blank lines', 'yet more lines with', '', 'blank lines', ]) def test_filter_slashslash(self): self.do_include_compare([ '#filter slashslash', 'PASS//FAIL // FAIL', '#unfilter slashslash', 'PASS // PASS', ], [ 'PASS', 'PASS // PASS', ]) def test_filter_spaces(self): self.do_include_compare([ '#filter spaces', 'You should see two nice ascii tables', ' +-+-+-+', ' | | | |', ' +-+-+-+', '#unfilter spaces', '+-+---+', '| | |', 
'+-+---+', ], [ 'You should see two nice ascii tables', '+-+-+-+', '| | | |', '+-+-+-+', '+-+---+', '| | |', '+-+---+', ]) def test_filter_substitution(self): self.do_include_pass([ '#define VAR ASS', '#filter substitution', 'P@VAR@', '#unfilter substitution', ]) def test_error(self): with MockedOpen({'f': '#error spit this message out\n'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.args[0][-1], 'spit this message out') def test_javascript_line(self): # The preprocessor is reading the filename from somewhere not caught # by MockedOpen. tmpdir = mkdtemp() try: full = os.path.join(tmpdir, 'javascript_line.js.in') with open(full, 'w') as fh: fh.write('\n'.join([ '// Line 1', '#if 0', '// line 3', '#endif', '// line 5', '# comment', '// line 7', '// line 8', '// line 9', '# another comment', '// line 11', '#define LINE 1', '// line 13, given line number overwritten with 2', '', ])) self.pp.do_include(full) out = '\n'.join([ '// Line 1', '//@line 5 "CWDjavascript_line.js.in"', '// line 5', '//@line 7 "CWDjavascript_line.js.in"', '// line 7', '// line 8', '// line 9', '//@line 11 "CWDjavascript_line.js.in"', '// line 11', '//@line 2 "CWDjavascript_line.js.in"', '// line 13, given line number overwritten with 2', '', ]) out = out.replace('CWD', tmpdir + os.path.sep) self.assertEqual(self.pp.out.getvalue(), out) finally: shutil.rmtree(tmpdir) def test_literal(self): self.do_include_pass([ '#literal PASS', ]) def test_var_directory(self): self.do_include_pass([ '#ifdef DIRECTORY', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_file(self): self.do_include_pass([ '#ifdef FILE', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_if_0(self): self.do_include_pass([ '#define VAR 0', '#if VAR', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_if_0_elifdef(self): self.do_include_pass([ '#if 0', '#elifdef FILE', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_if_0_elifndef(self): self.do_include_pass([ '#if 0', '#elifndef VAR', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_ifdef_0(self): self.do_include_pass([ '#define VAR 0', '#ifdef VAR', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_ifdef_1_or_undef(self): self.do_include_pass([ '#define FOO 1', '#if defined(FOO) || defined(BAR)', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_ifdef_undef(self): self.do_include_pass([ '#define VAR 0', '#undef VAR', '#ifdef VAR', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_ifndef_0(self): self.do_include_pass([ '#define VAR 0', '#ifndef VAR', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_ifndef_0_and_undef(self): self.do_include_pass([ '#define FOO 0', '#if !defined(FOO) && !defined(BAR)', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_ifndef_undef(self): self.do_include_pass([ '#define VAR 0', '#undef VAR', '#ifndef VAR', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_line(self): self.do_include_pass([ '#ifdef LINE', 'PASS', '#else', 'FAIL', '#endif', ]) def test_filterDefine(self): self.do_include_pass([ '#filter substitution', '#define VAR AS', '#define VAR2 P@VAR@', '@VAR2@S', ]) def test_number_value_equals(self): self.do_include_pass([ '#define FOO 1000', '#if FOO == 1000', 'PASS', '#else', 'FAIL', '#endif', ]) def test_default_defines(self): self.pp.handleCommandLine(["-DFOO"]) self.do_include_pass([ '#if FOO == 1', 'PASS', '#else', 'FAIL', ]) def test_number_value_equals_defines(self): self.pp.handleCommandLine(["-DFOO=1000"]) self.do_include_pass([ '#if FOO == 1000', 'PASS', '#else', 'FAIL', ]) def 
test_octal_value_equals(self): self.do_include_pass([ '#define FOO 0100', '#if FOO == 0100', 'PASS', '#else', 'FAIL', '#endif', ]) def test_octal_value_equals_defines(self): self.pp.handleCommandLine(["-DFOO=0100"]) self.do_include_pass([ '#if FOO == 0100', 'PASS', '#else', 'FAIL', '#endif', ]) def test_value_quoted_expansion(self): """ Quoted values on the commandline don't currently have quotes stripped. Pike says this is for compat reasons. """ self.pp.handleCommandLine(['-DFOO="ABCD"']) self.do_include_compare([ '#filter substitution', '@FOO@', ], ['"ABCD"']) def test_octal_value_quoted_expansion(self): self.pp.handleCommandLine(['-DFOO="0100"']) self.do_include_compare([ '#filter substitution', '@FOO@', ], ['"0100"']) def test_number_value_not_equals_quoted_defines(self): self.pp.handleCommandLine(['-DFOO="1000"']) self.do_include_pass([ '#if FOO == 1000', 'FAIL', '#else', 'PASS', '#endif', ]) def test_octal_value_not_equals_quoted_defines(self): self.pp.handleCommandLine(['-DFOO="0100"']) self.do_include_pass([ '#if FOO == 0100', 'FAIL', '#else', 'PASS', '#endif', ]) def test_undefined_variable(self): with MockedOpen({'f': '#filter substitution\n@foo@'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.key, 'UNDEFINED_VAR') def test_include(self): files = { 'foo/test': '\n'.join([ '#define foo foobarbaz', '#include @inc@', '@bar@', '', ]), 'bar': '\n'.join([ '#define bar barfoobaz', '@foo@', '', ]), 'f': '\n'.join([ '#filter substitution', '#define inc ../bar', '#include foo/test', '', ]), } with MockedOpen(files): self.pp.do_include('f') self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\nbarfoobaz\n') def test_include_line(self): files = { 'test.js': '\n'.join([ '#define foo foobarbaz', '#include @inc@', '@bar@', '', ]), 'bar.js': '\n'.join([ '#define bar barfoobaz', '@foo@', '', ]), 'foo.js': '\n'.join([ 'bazfoobar', '#include bar.js', 'bazbarfoo', '', ]), 'baz.js': 'baz\n', 'f.js': '\n'.join([ '#include foo.js', '#filter substitution', '#define inc bar.js', '#include test.js', '#include baz.js', 'fin', '', ]), } with MockedOpen(files): self.pp.do_include('f.js') self.assertEqual(self.pp.out.getvalue(), ('//@line 1 "CWD/foo.js"\n' 'bazfoobar\n' '//@line 2 "CWD/bar.js"\n' '@foo@\n' '//@line 3 "CWD/foo.js"\n' 'bazbarfoo\n' '//@line 2 "CWD/bar.js"\n' 'foobarbaz\n' '//@line 3 "CWD/test.js"\n' 'barfoobaz\n' '//@line 1 "CWD/baz.js"\n' 'baz\n' '//@line 6 "CWD/f.js"\n' 'fin\n').replace('CWD/', os.getcwd() + os.path.sep)) def test_include_missing_file(self): with MockedOpen({'f': '#include foo\n'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.exception.key, 'FILE_NOT_FOUND') def test_include_undefined_variable(self): with MockedOpen({'f': '#filter substitution\n#include @foo@\n'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.exception.key, 'UNDEFINED_VAR') def test_include_literal_at(self): files = { '@foo@': '#define foo foobarbaz\n', 'f': '#include @foo@\n#filter substitution\n@foo@\n', } with MockedOpen(files): self.pp.do_include('f') self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n') def test_command_line_literal_at(self): with MockedOpen({"@[email protected]": '@foo@\n'}): self.pp.handleCommandLine(['-Fsubstitution', '-Dfoo=foobarbaz', '@[email protected]']) self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')
class TestPreprocessor(unittest.TestCase): """ Unit tests for the Context class """ def setUp(self): self.pp = Preprocessor() self.pp.out = StringIO() def do_include_compare(self, content_lines, expected_lines): content = '%s' % '\n'.join(content_lines) expected = '%s'.rstrip() % '\n'.join(expected_lines) with MockedOpen({'dummy': content}): self.pp.do_include('dummy') self.assertEqual(self.pp.out.getvalue().rstrip('\n'), expected) def do_include_pass(self, content_lines): self.do_include_compare(content_lines, ['PASS']) def test_conditional_if_0(self): self.do_include_pass([ '#if 0', 'FAIL', '#else', 'PASS', '#endif', ]) def test_no_marker(self): lines = [ '#if 0', 'PASS', '#endif', ] self.pp.setMarker(None) self.do_include_compare(lines, lines) def test_string_value(self): self.do_include_compare([ '#define FOO STRING', '#if FOO', 'string value is true', '#else', 'string value is false', '#endif', ], ['string value is false']) def test_number_value(self): self.do_include_compare([ '#define FOO 1', '#if FOO', 'number value is true', '#else', 'number value is false', '#endif', ], ['number value is true']) def test_conditional_if_0_elif_1(self): self.do_include_pass([ '#if 0', '#elif 1', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_if_1(self): self.do_include_pass([ '#if 1', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_if_0_or_1(self): self.do_include_pass([ '#if 0 || 1', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_if_1_elif_1_else(self): self.do_include_pass([ '#if 1', 'PASS', '#elif 1', 'FAIL', '#else', 'FAIL', '#endif', ]) def test_conditional_if_1_if_1(self): self.do_include_pass([ '#if 1', '#if 1', 'PASS', '#else', 'FAIL', '#endif', '#else', 'FAIL', '#endif', ]) def test_conditional_not_0(self): self.do_include_pass([ '#if !0', 'PASS', '#else', 'FAIL', '#endif', ]) def test_conditional_not_0_and_1(self): self.do_include_pass([ '#if !0 && !1', 'FAIL', '#else', 'PASS', '#endif', ]) def test_conditional_not_1(self): self.do_include_pass([ '#if !1', 'FAIL', '#else', 'PASS', '#endif', ]) def test_conditional_not_emptyval(self): self.do_include_compare([ '#define EMPTYVAL', '#ifndef EMPTYVAL', 'FAIL', '#else', 'PASS', '#endif', '#ifdef EMPTYVAL', 'PASS', '#else', 'FAIL', '#endif', ], ['PASS', 'PASS']) def test_conditional_not_nullval(self): self.do_include_pass([ '#define NULLVAL 0', '#if !NULLVAL', 'PASS', '#else', 'FAIL', '#endif', ]) def test_expand(self): self.do_include_pass([ '#define ASVAR AS', '#expand P__ASVAR__S', ]) def test_undef_defined(self): self.do_include_compare([ '#define BAR', '#undef BAR', 'BAR', ], ['BAR']) def test_undef_undefined(self): self.do_include_compare([ '#undef BAR', ], []) def test_filter_attemptSubstitution(self): self.do_include_compare([ '#filter attemptSubstitution', '@PASS@', '#unfilter attemptSubstitution', ], ['@PASS@']) def test_filter_emptyLines(self): self.do_include_compare([ 'lines with a', '', 'blank line', '#filter emptyLines', 'lines with', '', 'no blank lines', '#unfilter emptyLines', 'yet more lines with', '', 'blank lines', ], [ 'lines with a', '', 'blank line', 'lines with', 'no blank lines', 'yet more lines with', '', 'blank lines', ]) def test_filter_slashslash(self): self.do_include_compare([ '#filter slashslash', 'PASS//FAIL // FAIL', '#unfilter slashslash', 'PASS // PASS', ], [ 'PASS', 'PASS // PASS', ]) def test_filter_spaces(self): self.do_include_compare([ '#filter spaces', 'You should see two nice ascii tables', ' +-+-+-+', ' | | | |', ' +-+-+-+', '#unfilter spaces', '+-+---+', '| | |', 
'+-+---+', ], [ 'You should see two nice ascii tables', '+-+-+-+', '| | | |', '+-+-+-+', '+-+---+', '| | |', '+-+---+', ]) def test_filter_substitution(self): self.do_include_pass([ '#define VAR ASS', '#filter substitution', 'P@VAR@', '#unfilter substitution', ]) def test_error(self): with MockedOpen({'f': '#error spit this message out\n'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.args[0][-1], 'spit this message out') def test_javascript_line(self): # The preprocessor is reading the filename from somewhere not caught # by MockedOpen. tmpdir = mkdtemp() try: full = os.path.join(tmpdir, 'javascript_line.js.in') with open(full, 'w') as fh: fh.write('\n'.join([ '// Line 1', '#if 0', '// line 3', '#endif', '// line 5', '# comment', '// line 7', '// line 8', '// line 9', '# another comment', '// line 11', '#define LINE 1', '// line 13, given line number overwritten with 2', '', ])) self.pp.do_include(full) out = '\n'.join([ '// Line 1', '//@line 5 "CWDjavascript_line.js.in"', '// line 5', '//@line 7 "CWDjavascript_line.js.in"', '// line 7', '// line 8', '// line 9', '//@line 11 "CWDjavascript_line.js.in"', '// line 11', '//@line 2 "CWDjavascript_line.js.in"', '// line 13, given line number overwritten with 2', '', ]) out = out.replace('CWD', tmpdir + os.path.sep) self.assertEqual(self.pp.out.getvalue(), out) finally: shutil.rmtree(tmpdir) def test_literal(self): self.do_include_pass([ '#literal PASS', ]) def test_var_directory(self): self.do_include_pass([ '#ifdef DIRECTORY', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_file(self): self.do_include_pass([ '#ifdef FILE', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_if_0(self): self.do_include_pass([ '#define VAR 0', '#if VAR', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_if_0_elifdef(self): self.do_include_pass([ '#if 0', '#elifdef FILE', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_if_0_elifndef(self): self.do_include_pass([ '#if 0', '#elifndef VAR', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_ifdef_0(self): self.do_include_pass([ '#define VAR 0', '#ifdef VAR', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_ifdef_1_or_undef(self): self.do_include_pass([ '#define FOO 1', '#if defined(FOO) || defined(BAR)', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_ifdef_undef(self): self.do_include_pass([ '#define VAR 0', '#undef VAR', '#ifdef VAR', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_ifndef_0(self): self.do_include_pass([ '#define VAR 0', '#ifndef VAR', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_ifndef_0_and_undef(self): self.do_include_pass([ '#define FOO 0', '#if !defined(FOO) && !defined(BAR)', 'FAIL', '#else', 'PASS', '#endif', ]) def test_var_ifndef_undef(self): self.do_include_pass([ '#define VAR 0', '#undef VAR', '#ifndef VAR', 'PASS', '#else', 'FAIL', '#endif', ]) def test_var_line(self): self.do_include_pass([ '#ifdef LINE', 'PASS', '#else', 'FAIL', '#endif', ]) def test_filterDefine(self): self.do_include_pass([ '#filter substitution', '#define VAR AS', '#define VAR2 P@VAR@', '@VAR2@S', ]) def test_number_value_equals(self): self.do_include_pass([ '#define FOO 1000', '#if FOO == 1000', 'PASS', '#else', 'FAIL', '#endif', ]) def test_default_defines(self): self.pp.handleCommandLine(["-DFOO"]) self.do_include_pass([ '#if FOO == 1', 'PASS', '#else', 'FAIL', ]) def test_number_value_equals_defines(self): self.pp.handleCommandLine(["-DFOO=1000"]) self.do_include_pass([ '#if FOO == 1000', 'PASS', '#else', 'FAIL', ]) def 
test_octal_value_equals(self): self.do_include_pass([ '#define FOO 0100', '#if FOO == 0100', 'PASS', '#else', 'FAIL', '#endif', ]) def test_octal_value_equals_defines(self): self.pp.handleCommandLine(["-DFOO=0100"]) self.do_include_pass([ '#if FOO == 0100', 'PASS', '#else', 'FAIL', '#endif', ]) def test_value_quoted_expansion(self): """ Quoted values on the commandline don't currently have quotes stripped. Pike says this is for compat reasons. """ self.pp.handleCommandLine(['-DFOO="ABCD"']) self.do_include_compare([ '#filter substitution', '@FOO@', ], ['"ABCD"']) def test_octal_value_quoted_expansion(self): self.pp.handleCommandLine(['-DFOO="0100"']) self.do_include_compare([ '#filter substitution', '@FOO@', ], ['"0100"']) def test_number_value_not_equals_quoted_defines(self): self.pp.handleCommandLine(['-DFOO="1000"']) self.do_include_pass([ '#if FOO == 1000', 'FAIL', '#else', 'PASS', '#endif', ]) def test_octal_value_not_equals_quoted_defines(self): self.pp.handleCommandLine(['-DFOO="0100"']) self.do_include_pass([ '#if FOO == 0100', 'FAIL', '#else', 'PASS', '#endif', ]) def test_undefined_variable(self): with MockedOpen({'f': '#filter substitution\n@foo@'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.key, 'UNDEFINED_VAR') def test_include(self): files = { 'foo/test': '\n'.join([ '#define foo foobarbaz', '#include @inc@', '@bar@', '', ]), 'bar': '\n'.join([ '#define bar barfoobaz', '@foo@', '', ]), 'f': '\n'.join([ '#filter substitution', '#define inc ../bar', '#include foo/test', '', ]), } with MockedOpen(files): self.pp.do_include('f') self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\nbarfoobaz\n') def test_include_line(self): files = { 'srcdir/test.js': '\n'.join([ '#define foo foobarbaz', '#include @inc@', '@bar@', '', ]), 'srcdir/bar.js': '\n'.join([ '#define bar barfoobaz', '@foo@', '', ]), 'srcdir/foo.js': '\n'.join([ 'bazfoobar', '#include bar.js', 'bazbarfoo', '', ]), 'objdir/baz.js': 'baz\n', 'srcdir/f.js': '\n'.join([ '#include foo.js', '#filter substitution', '#define inc bar.js', '#include test.js', '#include ../objdir/baz.js', 'fin', '', ]), } preprocessed = ('//@line 1 "$SRCDIR/foo.js"\n' 'bazfoobar\n' '//@line 2 "$SRCDIR/bar.js"\n' '@foo@\n' '//@line 3 "$SRCDIR/foo.js"\n' 'bazbarfoo\n' '//@line 2 "$SRCDIR/bar.js"\n' 'foobarbaz\n' '//@line 3 "$SRCDIR/test.js"\n' 'barfoobaz\n' '//@line 1 "$OBJDIR/baz.js"\n' 'baz\n' '//@line 6 "$SRCDIR/f.js"\n' 'fin\n').replace('DIR/', 'DIR' + os.sep) # Try with separate srcdir/objdir with MockedOpen(files): self.pp.topsrcdir = os.path.abspath('srcdir') self.pp.topobjdir = os.path.abspath('objdir') self.pp.do_include('srcdir/f.js') self.assertEqual(self.pp.out.getvalue(), preprocessed) # Try again with relative objdir self.setUp() files['srcdir/objdir/baz.js'] = files['objdir/baz.js'] del files['objdir/baz.js'] files['srcdir/f.js'] = files['srcdir/f.js'].replace('../', '') with MockedOpen(files): self.pp.topsrcdir = os.path.abspath('srcdir') self.pp.topobjdir = os.path.abspath('srcdir/objdir') self.pp.do_include('srcdir/f.js') self.assertEqual(self.pp.out.getvalue(), preprocessed) def test_include_missing_file(self): with MockedOpen({'f': '#include foo\n'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') self.assertEqual(e.exception.key, 'FILE_NOT_FOUND') def test_include_undefined_variable(self): with MockedOpen({'f': '#filter substitution\n#include @foo@\n'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('f') 
self.assertEqual(e.exception.key, 'UNDEFINED_VAR') def test_include_literal_at(self): files = { '@foo@': '#define foo foobarbaz\n', 'f': '#include @foo@\n#filter substitution\n@foo@\n', } with MockedOpen(files): self.pp.do_include('f') self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n') def test_command_line_literal_at(self): with MockedOpen({"@[email protected]": '@foo@\n'}): self.pp.handleCommandLine( ['-Fsubstitution', '-Dfoo=foobarbaz', '@[email protected]']) self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n') def test_invalid_ifdef(self): with MockedOpen({'dummy': '#ifdef FOO == BAR\nPASS\n#endif'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('dummy') self.assertEqual(e.exception.key, 'INVALID_VAR') with MockedOpen({'dummy': '#ifndef FOO == BAR\nPASS\n#endif'}): with self.assertRaises(Preprocessor.Error) as e: self.pp.do_include('dummy') self.assertEqual(e.exception.key, 'INVALID_VAR') # Trailing whitespaces, while not nice, shouldn't be an error. self.do_include_pass([ '#ifndef FOO ', 'PASS', '#endif', ])
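# A quick sketch of driving the Preprocessor from command-line style arguments,
# as test_command_line_literal_at and the -D tests above do. The input file
# "banner.txt.in" and its contents are hypothetical and assumed to exist.
from io import StringIO
from mozbuild.preprocessor import Preprocessor

pp = Preprocessor()
pp.out = StringIO()
pp.handleCommandLine(['-Fsubstitution',      # enable the substitution filter
                      '-DGREETING=hello',    # define GREETING on the "command line"
                      'banner.txt.in'])      # file containing: @GREETING@ world
# pp.out.getvalue() would then be "hello world\n".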
class JarMaker(object): """JarMaker reads jar.mn files and process those into jar files or flat directories, along with chrome.manifest files. """ ignore = re.compile("\s*(\#.*)?$") jarline = re.compile("(?:(?P<jarfile>[\w\d.\-\_\\\/{}]+).jar\:)|(?:\s*(\#.*)?)\s*$") relsrcline = re.compile("relativesrcdir\s+(?P<relativesrcdir>.+?):") regline = re.compile("\%\s+(.*)$") entryre = "(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+" entryline = re.compile( entryre + "(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$" ) def __init__(self, outputFormat="flat", useJarfileManifest=True, useChromeManifest=False): self.outputFormat = outputFormat self.useJarfileManifest = useJarfileManifest self.useChromeManifest = useChromeManifest self.pp = Preprocessor() self.topsourcedir = None self.sourcedirs = [] self.localedirs = None self.l10nbase = None self.l10nmerge = None self.relativesrcdir = None self.rootManifestAppId = None def getCommandLineParser(self): """Get a optparse.OptionParser for jarmaker. This OptionParser has the options for jarmaker as well as the options for the inner PreProcessor. """ # HACK, we need to unescape the string variables we get, # the perl versions didn't grok strings right p = self.pp.getCommandLineParser(unescapeDefines=True) p.add_option( "-f", type="choice", default="jar", choices=("jar", "flat", "symlink"), help="fileformat used for output", metavar="[jar, flat, symlink]", ) p.add_option("-v", action="store_true", dest="verbose", help="verbose output") p.add_option("-q", action="store_false", dest="verbose", help="verbose output") p.add_option("-e", action="store_true", help="create chrome.manifest instead of jarfile.manifest") p.add_option("-s", type="string", action="append", default=[], help="source directory") p.add_option("-t", type="string", help="top source directory") p.add_option("-c", "--l10n-src", type="string", action="append", help="localization directory") p.add_option( "--l10n-base", type="string", action="store", help="base directory to be used for localization (requires relativesrcdir)", ) p.add_option( "--locale-mergedir", type="string", action="store", help="base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)", ) p.add_option("--relativesrcdir", type="string", help="relativesrcdir to be used for localization") p.add_option("-j", type="string", help="jarfile directory") p.add_option( "--root-manifest-entry-appid", type="string", help="add an app id specific root chrome manifest entry." ) return p def processIncludes(self, includes): """Process given includes with the inner PreProcessor. Only use this for #defines, the includes shouldn't generate content. """ self.pp.out = StringIO() for inc in includes: self.pp.do_include(inc) includesvalue = self.pp.out.getvalue() if includesvalue: logging.info("WARNING: Includes produce non-empty output") self.pp.out = None def finalizeJar(self, jarPath, chromebasepath, register, doZip=True): """Helper method to write out the chrome registration entries to jarfile.manifest or chrome.manifest, or both. The actual file processing is done in updateManifest. 
""" # rewrite the manifest, if entries given if not register: return chromeManifest = os.path.join(os.path.dirname(jarPath), "..", "chrome.manifest") if self.useJarfileManifest: self.updateManifest(jarPath + ".manifest", chromebasepath.format(""), register) addEntriesToListFile(chromeManifest, ["manifest chrome/{0}.manifest".format(os.path.basename(jarPath))]) if self.useChromeManifest: self.updateManifest(chromeManifest, chromebasepath.format("chrome/"), register) # If requested, add a root chrome manifest entry (assumed to be in the parent directory # of chromeManifest) with the application specific id. In cases where we're building # lang packs, the root manifest must know about application sub directories. if self.rootManifestAppId: rootChromeManifest = os.path.join( os.path.normpath(os.path.dirname(chromeManifest)), "..", "chrome.manifest" ) rootChromeManifest = os.path.normpath(rootChromeManifest) chromeDir = os.path.basename(os.path.dirname(os.path.normpath(chromeManifest))) logging.info("adding '%s' entry to root chrome manifest appid=%s" % (chromeDir, self.rootManifestAppId)) addEntriesToListFile( rootChromeManifest, ["manifest %s/chrome.manifest application=%s" % (chromeDir, self.rootManifestAppId)] ) def updateManifest(self, manifestPath, chromebasepath, register): """updateManifest replaces the % in the chrome registration entries with the given chrome base path, and updates the given manifest file. """ lock = lock_file(manifestPath + ".lck") try: myregister = dict.fromkeys(map(lambda s: s.replace("%", chromebasepath), register.iterkeys())) manifestExists = os.path.isfile(manifestPath) mode = manifestExists and "r+b" or "wb" mf = open(manifestPath, mode) if manifestExists: # import previous content into hash, ignoring empty ones and comments imf = re.compile("(#.*)?$") for l in re.split("[\r\n]+", mf.read()): if imf.match(l): continue myregister[l] = None mf.seek(0) for k in sorted(myregister.iterkeys()): mf.write(k + os.linesep) mf.close() finally: lock = None def makeJar(self, infile, jardir): """makeJar is the main entry point to JarMaker. It takes the input file, the output directory, the source dirs and the top source dir as argument, and optionally the l10n dirs. 
""" # making paths absolute, guess srcdir if file and add to sourcedirs _normpath = lambda p: os.path.normpath(os.path.abspath(p)) self.topsourcedir = _normpath(self.topsourcedir) self.sourcedirs = [_normpath(p) for p in self.sourcedirs] if self.localedirs: self.localedirs = [_normpath(p) for p in self.localedirs] elif self.relativesrcdir: self.localedirs = self.generateLocaleDirs(self.relativesrcdir) if isinstance(infile, basestring): logging.info("processing " + infile) self.sourcedirs.append(_normpath(os.path.dirname(infile))) pp = self.pp.clone() pp.out = StringIO() pp.do_include(infile) lines = PushbackIter(pp.out.getvalue().splitlines()) try: while True: l = lines.next() m = self.jarline.match(l) if not m: raise RuntimeError(l) if m.group("jarfile") is None: # comment continue self.processJarSection(m.group("jarfile"), lines, jardir) except StopIteration: # we read the file pass return def generateLocaleDirs(self, relativesrcdir): if os.path.basename(relativesrcdir) == "locales": # strip locales l10nrelsrcdir = os.path.dirname(relativesrcdir) else: l10nrelsrcdir = relativesrcdir locdirs = [] # generate locales dirs, merge, l10nbase, en-US if self.l10nmerge: locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir)) if self.l10nbase: locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) if self.l10nmerge or not self.l10nbase: # add en-US if we merge, or if it's not l10n locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, "en-US")) return locdirs def processJarSection(self, jarfile, lines, jardir): """Internal method called by makeJar to actually process a section of a jar.mn file. jarfile is the basename of the jarfile or the directory name for flat output, lines is a PushbackIter of the lines of jar.mn, the remaining options are carried over from makeJar. """ # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = "{0}" + os.path.basename(jarfile) if self.outputFormat == "jar": chromebasepath = "jar:" + chromebasepath + ".jar!" chromebasepath += "/" jarfile = os.path.join(jardir, jarfile) jf = None if self.outputFormat == "jar": # jar jarfilepath = jarfile + ".jar" try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, "a", lock=True) outHelper = self.OutputHelper_jar(jf) else:
class JarMaker(object): '''JarMaker reads jar.mn files and process those into jar files or flat directories, along with chrome.manifest files. ''' ignore = re.compile('\s*(\#.*)?$') jarline = re.compile('(?:(?P<jarfile>[\w\d.\-\_\\\/]+).jar\:)|(?:\s*(\#.*)?)\s*$') relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):') regline = re.compile('\%\s+(.*)$') entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+' entryline = re.compile(entryre + '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@]+)\))?\s*$' ) def __init__(self, outputFormat='flat', useJarfileManifest=True, useChromeManifest=False): self.outputFormat = outputFormat self.useJarfileManifest = useJarfileManifest self.useChromeManifest = useChromeManifest self.pp = Preprocessor() self.topsourcedir = None self.sourcedirs = [] self.localedirs = None self.l10nbase = None self.l10nmerge = None self.relativesrcdir = None self.rootManifestAppId = None def getCommandLineParser(self): '''Get a optparse.OptionParser for jarmaker. This OptionParser has the options for jarmaker as well as the options for the inner PreProcessor. ''' # HACK, we need to unescape the string variables we get, # the perl versions didn't grok strings right p = self.pp.getCommandLineParser(unescapeDefines=True) p.add_option('-f', type='choice', default='jar', choices=('jar', 'flat', 'symlink'), help='fileformat used for output', metavar='[jar, flat, symlink]', ) p.add_option('-v', action='store_true', dest='verbose', help='verbose output') p.add_option('-q', action='store_false', dest='verbose', help='verbose output') p.add_option('-e', action='store_true', help='create chrome.manifest instead of jarfile.manifest' ) p.add_option('-s', type='string', action='append', default=[], help='source directory') p.add_option('-t', type='string', help='top source directory') p.add_option('-c', '--l10n-src', type='string', action='append' , help='localization directory') p.add_option('--l10n-base', type='string', action='store', help='base directory to be used for localization (requires relativesrcdir)' ) p.add_option('--locale-mergedir', type='string', action='store' , help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)' ) p.add_option('--relativesrcdir', type='string', help='relativesrcdir to be used for localization') p.add_option('-j', type='string', help='jarfile directory') p.add_option('--root-manifest-entry-appid', type='string', help='add an app id specific root chrome manifest entry.' ) return p def processIncludes(self, includes): '''Process given includes with the inner PreProcessor. Only use this for #defines, the includes shouldn't generate content. ''' self.pp.out = StringIO() for inc in includes: self.pp.do_include(inc) includesvalue = self.pp.out.getvalue() if includesvalue: logging.info('WARNING: Includes produce non-empty output') self.pp.out = None def finalizeJar(self, jarPath, chromebasepath, register, doZip=True): '''Helper method to write out the chrome registration entries to jarfile.manifest or chrome.manifest, or both. The actual file processing is done in updateManifest. 
''' # rewrite the manifest, if entries given if not register: return chromeManifest = os.path.join(os.path.dirname(jarPath), '..', 'chrome.manifest') if self.useJarfileManifest: self.updateManifest(jarPath + '.manifest', chromebasepath.format(''), register) addEntriesToListFile(chromeManifest, ['manifest chrome/{0}.manifest'.format(os.path.basename(jarPath))]) if self.useChromeManifest: self.updateManifest(chromeManifest, chromebasepath.format('chrome/'), register) # If requested, add a root chrome manifest entry (assumed to be in the parent directory # of chromeManifest) with the application specific id. In cases where we're building # lang packs, the root manifest must know about application sub directories. if self.rootManifestAppId: rootChromeManifest = \ os.path.join(os.path.normpath(os.path.dirname(chromeManifest)), '..', 'chrome.manifest') rootChromeManifest = os.path.normpath(rootChromeManifest) chromeDir = \ os.path.basename(os.path.dirname(os.path.normpath(chromeManifest))) logging.info("adding '%s' entry to root chrome manifest appid=%s" % (chromeDir, self.rootManifestAppId)) addEntriesToListFile(rootChromeManifest, ['manifest %s/chrome.manifest application=%s' % (chromeDir, self.rootManifestAppId)]) def updateManifest(self, manifestPath, chromebasepath, register): '''updateManifest replaces the % in the chrome registration entries with the given chrome base path, and updates the given manifest file. ''' lock = lock_file(manifestPath + '.lck') try: myregister = dict.fromkeys(map(lambda s: s.replace('%', chromebasepath), register.iterkeys())) manifestExists = os.path.isfile(manifestPath) mode = manifestExists and 'r+b' or 'wb' mf = open(manifestPath, mode) if manifestExists: # import previous content into hash, ignoring empty ones and comments imf = re.compile('(#.*)?$') for l in re.split('[\r\n]+', mf.read()): if imf.match(l): continue myregister[l] = None mf.seek(0) for k in sorted(myregister.iterkeys()): mf.write(k + os.linesep) mf.close() finally: lock = None def makeJar(self, infile, jardir): '''makeJar is the main entry point to JarMaker. It takes the input file, the output directory, the source dirs and the top source dir as argument, and optionally the l10n dirs. 
''' # making paths absolute, guess srcdir if file and add to sourcedirs _normpath = lambda p: os.path.normpath(os.path.abspath(p)) self.topsourcedir = _normpath(self.topsourcedir) self.sourcedirs = [_normpath(p) for p in self.sourcedirs] if self.localedirs: self.localedirs = [_normpath(p) for p in self.localedirs] elif self.relativesrcdir: self.localedirs = \ self.generateLocaleDirs(self.relativesrcdir) if isinstance(infile, basestring): logging.info('processing ' + infile) self.sourcedirs.append(_normpath(os.path.dirname(infile))) pp = self.pp.clone() pp.out = StringIO() pp.do_include(infile) lines = PushbackIter(pp.out.getvalue().splitlines()) try: while True: l = lines.next() m = self.jarline.match(l) if not m: raise RuntimeError(l) if m.group('jarfile') is None: # comment continue self.processJarSection(m.group('jarfile'), lines, jardir) except StopIteration: # we read the file pass return def generateLocaleDirs(self, relativesrcdir): if os.path.basename(relativesrcdir) == 'locales': # strip locales l10nrelsrcdir = os.path.dirname(relativesrcdir) else: l10nrelsrcdir = relativesrcdir locdirs = [] # generate locales dirs, merge, l10nbase, en-US if self.l10nmerge: locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir)) if self.l10nbase: locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) if self.l10nmerge or not self.l10nbase: # add en-US if we merge, or if it's not l10n locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, 'en-US')) return locdirs def processJarSection(self, jarfile, lines, jardir): '''Internal method called by makeJar to actually process a section of a jar.mn file. jarfile is the basename of the jarfile or the directory name for flat output, lines is a PushbackIter of the lines of jar.mn, the remaining options are carried over from makeJar. ''' # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = '{0}' + os.path.basename(jarfile) if self.outputFormat == 'jar': chromebasepath = 'jar:' + chromebasepath + '.jar!' chromebasepath += '/' jarfile = os.path.join(jardir, jarfile) jf = None if self.outputFormat == 'jar': # jar jarfilepath = jarfile + '.jar' try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, 'a', lock=True) outHelper = self.OutputHelper_jar(jf) else:
class JarMaker(object): """JarMaker reads jar.mn files and process those into jar files or flat directories, along with chrome.manifest files. """ def __init__(self, outputFormat="flat", useJarfileManifest=True, useChromeManifest=False): self.outputFormat = outputFormat self.useJarfileManifest = useJarfileManifest self.useChromeManifest = useChromeManifest self.pp = Preprocessor() self.topsourcedir = None self.sourcedirs = [] self.localedirs = None self.l10nbase = None self.l10nmerge = None self.relativesrcdir = None self.rootManifestAppId = None def getCommandLineParser(self): """Get a optparse.OptionParser for jarmaker. This OptionParser has the options for jarmaker as well as the options for the inner PreProcessor. """ # HACK, we need to unescape the string variables we get, # the perl versions didn't grok strings right p = self.pp.getCommandLineParser(unescapeDefines=True) p.add_option( "-f", type="choice", default="jar", choices=("jar", "flat", "symlink"), help="fileformat used for output", metavar="[jar, flat, symlink]", ) p.add_option("-v", action="store_true", dest="verbose", help="verbose output") p.add_option("-q", action="store_false", dest="verbose", help="verbose output") p.add_option("-e", action="store_true", help="create chrome.manifest instead of jarfile.manifest") p.add_option("-s", type="string", action="append", default=[], help="source directory") p.add_option("-t", type="string", help="top source directory") p.add_option("-c", "--l10n-src", type="string", action="append", help="localization directory") p.add_option( "--l10n-base", type="string", action="store", help="base directory to be used for localization (requires relativesrcdir)", ) p.add_option( "--locale-mergedir", type="string", action="store", help="base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)", ) p.add_option("--relativesrcdir", type="string", help="relativesrcdir to be used for localization") p.add_option("-d", type="string", help="base directory") p.add_option( "--root-manifest-entry-appid", type="string", help="add an app id specific root chrome manifest entry." ) return p def processIncludes(self, includes): """Process given includes with the inner PreProcessor. Only use this for #defines, the includes shouldn't generate content. """ self.pp.out = StringIO() for inc in includes: self.pp.do_include(inc) includesvalue = self.pp.out.getvalue() if includesvalue: logging.info("WARNING: Includes produce non-empty output") self.pp.out = None def finalizeJar(self, jardir, jarbase, jarname, chromebasepath, register, doZip=True): """Helper method to write out the chrome registration entries to jarfile.manifest or chrome.manifest, or both. The actual file processing is done in updateManifest. """ # rewrite the manifest, if entries given if not register: return chromeManifest = os.path.join(jardir, jarbase, "chrome.manifest") if self.useJarfileManifest: self.updateManifest( os.path.join(jardir, jarbase, jarname + ".manifest"), chromebasepath.format(""), register ) if jarname != "chrome": addEntriesToListFile(chromeManifest, ["manifest {0}.manifest".format(jarname)]) if self.useChromeManifest: chromebase = os.path.dirname(jarname) + "/" self.updateManifest(chromeManifest, chromebasepath.format(chromebase), register) # If requested, add a root chrome manifest entry (assumed to be in the parent directory # of chromeManifest) with the application specific id. In cases where we're building # lang packs, the root manifest must know about application sub directories. 
if self.rootManifestAppId: rootChromeManifest = os.path.join( os.path.normpath(os.path.dirname(chromeManifest)), "..", "chrome.manifest" ) rootChromeManifest = os.path.normpath(rootChromeManifest) chromeDir = os.path.basename(os.path.dirname(os.path.normpath(chromeManifest))) logging.info("adding '%s' entry to root chrome manifest appid=%s" % (chromeDir, self.rootManifestAppId)) addEntriesToListFile( rootChromeManifest, ["manifest %s/chrome.manifest application=%s" % (chromeDir, self.rootManifestAppId)] ) def updateManifest(self, manifestPath, chromebasepath, register): """updateManifest replaces the % in the chrome registration entries with the given chrome base path, and updates the given manifest file. """ myregister = dict.fromkeys(map(lambda s: s.replace("%", chromebasepath), register)) addEntriesToListFile(manifestPath, myregister.iterkeys()) def makeJar(self, infile, jardir): """makeJar is the main entry point to JarMaker. It takes the input file, the output directory, the source dirs and the top source dir as argument, and optionally the l10n dirs. """ # making paths absolute, guess srcdir if file and add to sourcedirs _normpath = lambda p: os.path.normpath(os.path.abspath(p)) self.topsourcedir = _normpath(self.topsourcedir) self.sourcedirs = [_normpath(p) for p in self.sourcedirs] if self.localedirs: self.localedirs = [_normpath(p) for p in self.localedirs] elif self.relativesrcdir: self.localedirs = self.generateLocaleDirs(self.relativesrcdir) if isinstance(infile, basestring): logging.info("processing " + infile) self.sourcedirs.append(_normpath(os.path.dirname(infile))) pp = self.pp.clone() pp.out = JarManifestParser() pp.do_include(infile) for info in pp.out: self.processJarSection(info, jardir) def generateLocaleDirs(self, relativesrcdir): if os.path.basename(relativesrcdir) == "locales": # strip locales l10nrelsrcdir = os.path.dirname(relativesrcdir) else: l10nrelsrcdir = relativesrcdir locdirs = [] # generate locales dirs, merge, l10nbase, en-US if self.l10nmerge: locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir)) if self.l10nbase: locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) if self.l10nmerge or not self.l10nbase: # add en-US if we merge, or if it's not l10n locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, "en-US")) return locdirs def processJarSection(self, jarinfo, jardir): """Internal method called by makeJar to actually process a section of a jar.mn file. """ # chromebasepath is used for chrome registration manifests # {0} is getting replaced with chrome/ for chrome.manifest, and with # an empty string for jarfile.manifest chromebasepath = "{0}" + os.path.basename(jarinfo.name) if self.outputFormat == "jar": chromebasepath = "jar:" + chromebasepath + ".jar!" chromebasepath += "/" jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name) jf = None if self.outputFormat == "jar": # jar jarfilepath = jarfile + ".jar" try: os.makedirs(os.path.dirname(jarfilepath)) except OSError, error: if error.errno != errno.EEXIST: raise jf = ZipFile(jarfilepath, "a", lock=True) outHelper = self.OutputHelper_jar(jf) else:
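# Illustrative: how chromebasepath from processJarSection() above expands for a
# hypothetical "browser" section, and how updateManifest() then rewrites '%'.
name = "browser"
chromebasepath = "{0}" + name                      # "{0}browser"
# With outputFormat == "jar":
jarred = "jar:" + chromebasepath + ".jar!" + "/"   # "jar:{0}browser.jar!/"
for_chrome = jarred.format("chrome/")              # "jar:chrome/browser.jar!/" (chrome.manifest)
for_jarfile = jarred.format("")                    # "jar:browser.jar!/" (browser.manifest)
# Flat/symlink output simply yields "{0}browser/".
# updateManifest() replaces '%' in each registration line with that base path:
rewritten = "content browser %content/browser/".replace("%", for_chrome)
# -> "content browser jar:chrome/browser.jar!/content/browser/"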