Example #1
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'VERSION': 'xul%s' % buildconfig.substs['MOZILLA_SYMBOLVERSION'],
    })
    pp.out = output
    pp.do_include(input_file)
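A hedged invocation sketch for the main() above, not part of the original example: it assumes the script runs inside the Mozilla build environment (where buildconfig is importable) and takes the output path and input template as placeholder command-line arguments.

if __name__ == '__main__':
    import sys
    # sys.argv[1]: destination file, sys.argv[2]: input template (placeholders).
    with open(sys.argv[1], 'w') as output:
        main(output, sys.argv[2])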
Example #2
def main(args):
    pp = Preprocessor()
    optparser = pp.getCommandLineParser()
    optparser.add_option('--nss-file', action='append',
                         type='string', dest='nss_files', default=[],
                         help='Specify a .def file that should have NSS\'s processing rules applied to it')
    options, deffiles = optparser.parse_args(args)

    symbols = set()
    for f in options.nss_files:
        symbols |= extract_symbols(nss_preprocess_file(f))
    for f in deffiles:
        # Start each deffile off with a clean slate.
        defpp = pp.clone()
        symbols |= extract_symbols(preprocess_file(defpp, f))

    script = """{
global:
  %s
local:
  *;
};
"""
    with FileAvoidWrite(options.output) as f:
        f.write(script % '\n  '.join("%s;" % s for s in sorted(symbols)))
Example #3
def main(output, input_file, version):
    pp = Preprocessor()
    pp.context.update({
        'VERSION': version,
    })
    pp.out = output
    pp.do_include(input_file)
Example #4
def main(output, input_file, version):
    pp = Preprocessor()
    pp.context.update(
        {
            "VERSION": version,
        }
    )
    pp.out = output
    pp.do_include(input_file)
Example #5
    def copy(self, dest, skip_if_older=True):
        '''
        Invokes the preprocessor to create the destination file.
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        if hasattr(os, 'symlink'):
            if os.path.islink(dest.path):
                os.remove(dest.path)

        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with open(self.depfile, 'rb') as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with open(self.path, 'rU') as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
Example #6
    def copy(self, dest, skip_if_older=True):
        '''
        Invokes the preprocessor to create the destination file.
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        if hasattr(os, 'symlink'):
            if os.path.islink(dest.path):
                os.remove(dest.path)

        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with open(self.depfile, 'rb') as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)

        with open(self.path, 'rU') as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
Example #7
    def inputs(self):
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with open(self.path, 'rU') as input:
            with open(os.devnull, 'w') as output:
                pp.processFile(input=input, output=output)

        # This always yields at least self.path.
        return pp.includes
Example #8
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines["ALLDEFINES"])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get("MOZ_DEBUG"):
        pp.context["DEBUG"] = "1"

    if buildconfig.substs.get("CPU_ARCH") == "aarch64":
        pp.context["MOZ_AARCH64"] = True

    pp.out = StringIO()
    pp.do_filter("substitution")
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        input_file = os.path.relpath(
            pref_list_filename,
            os.environ.get("GECKO_PATH", os.environ.get("TOPSRCDIR")),
        )
        code = generate_code(pref_list, input_file)
    except (IOError, ValueError) as e:
        print("{}: error:\n  {}\n".format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code["static_pref_list_all_h"])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite("StaticPrefsAll.h") as fd:
        fd.write(code["static_prefs_all_h"])

    for group, text in sorted(code["static_pref_list_group_h"].items()):
        filename = "StaticPrefList_{}.h".format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code["static_prefs_group_h"].items()):
        filename = "StaticPrefs_{}.h".format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname,
                                     "StaticPrefsCGetters.cpp")) as fd:
        fd.write(code["static_prefs_c_getters_cpp"])

    with FileAvoidWrite("static_prefs.rs") as fd:
        fd.write(code["static_prefs_rs"])
Example #9
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    substs = buildconfig.substs

    # Substs taken verbatim.
    substs_vars = ('BIN_SUFFIX', )
    for var in substs_vars:
        pp.context[var] = '"%s"' % substs[var]

    # Derived values.
    for key, condition in (('IS_MAC', substs['OS_ARCH'] == 'Darwin'),
                           ('IS_LINUX', substs['OS_ARCH'] == 'Linux'),
                           ('IS_TEST_BUILD',
                            substs.get('ENABLE_TESTS') == '1'),
                           ('IS_DEBUG_BUILD', substs.get('MOZ_DEBUG') == '1'),
                           ('CRASHREPORTER', substs.get('MOZ_CRASHREPORTER')),
                           ('IS_ASAN', substs.get('MOZ_ASAN'))):
        if condition:
            pp.context[key] = '1'
        else:
            pp.context[key] = '0'

    pp.context.update({
        'XPC_BIN_PATH':
        '"%s/dist/bin"' % buildconfig.topobjdir,
        'CERTS_SRC_DIR':
        '"%s/build/pgo/certs"' % buildconfig.topsrcdir,
    })

    pp.out = output
    pp.do_include(input_file)
Example #10
    def __init__(self, outputFormat='flat', useJarfileManifest=True,
        useChromeManifest=False):

        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        self.localedirs = None
        self.l10nbase = None
        self.l10nmerge = None
        self.relativesrcdir = None
        self.rootManifestAppId = None
Example #11
def process_package_overload(src, dst, version, app_buildid):
    ensureParentDir(dst)
    # First replace numeric version like '1.3'
    # Then replace with 'slashed' version like '1_4'
    # Finally set the full length addon version like 1.3.20131230
    defines = {
        "NUM_VERSION": version,
        "SLASH_VERSION": version.replace(".", "_"),
        "FULL_VERSION": ("%s.%s" % (version, app_buildid))
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
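A hedged usage sketch for process_package_overload(); the paths, version, and build id below are placeholders, not values from the original project.

process_package_overload(
    'package-overload.properties.in',    # src template (placeholder)
    'dist/package-overload.properties',  # dst (placeholder)
    '1.3',                               # numeric version
    '20131230123456',                    # application build id
)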
Example #12
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'FFI_EXEC_TRAMPOLINE_TABLE': '0',
        'HAVE_LONG_DOUBLE': '0',
        'TARGET': buildconfig.substs['FFI_TARGET'],
        'VERSION': '',
    })
    pp.do_filter('substitution')
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
Example #13
    def do_if(self, expression, **kwargs):
        # The C preprocessor handles numbers following C rules, which is a
        # different handling than what our Preprocessor does out of the box.
        # Hack around it enough that the configure tests work properly.
        context = self.context

        def normalize_numbers(value):
            if isinstance(value, six.string_types):
                if value[-1:] == "L" and value[:-1].isdigit():
                    value = int(value[:-1])
            return value

        # Our Preprocessor doesn't handle macros with parameters, so we hack
        # around that for __has_feature()-like things.

        def normalize_has_feature_or_builtin(expr):
            return (self.HAS_FEATURE_OR_BUILTIN.sub(r"\1\2", expr).replace(
                "-", "_").replace("+", "_"))

        self.context = self.Context(
            (normalize_has_feature_or_builtin(k), normalize_numbers(v))
            for k, v in six.iteritems(context))
        try:
            return Preprocessor.do_if(
                self, normalize_has_feature_or_builtin(expression), **kwargs)
        finally:
            self.context = context
Example #14
    def do_if(self, expression, **kwargs):
        # The C preprocessor handles numbers following C rules, which is a
        # different handling than what our Preprocessor does out of the box.
        # Hack around it enough that the configure tests work properly.
        context = self.context

        def normalize_numbers(value):
            if isinstance(value, types.StringTypes):
                if value[-1:] == 'L' and value[:-1].isdigit():
                    value = int(value[:-1])
            return value

        # Our Preprocessor doesn't handle macros with parameters, so we hack
        # around that for __has_feature()-like things.
        def normalize_has_feature(expr):
            return self.HAS_FEATURE.sub(r'\1\2', expr)

        self.context = self.Context(
            (normalize_has_feature(k), normalize_numbers(v))
            for k, v in context.iteritems())
        try:
            return Preprocessor.do_if(self, normalize_has_feature(expression),
                                      **kwargs)
        finally:
            self.context = context
Example #15
def preprocess_file(src, dst, version, app_buildid, update_url):
    ensureParentDir(dst)

    defines = {
        "ADDON_ID": "fxos_" + version.replace(".", "_") + "*****@*****.**",
        # (reduce the app build id to only the build date
        # as addon manager doesn't handle big ints in addon versions)
        "ADDON_VERSION": ("%s.%s" % (version, app_buildid[:8])),
        "ADDON_NAME": "Firefox OS " + version + " Simulator",
        "ADDON_DESCRIPTION": "a Firefox OS " + version + " simulator",
        "ADDON_UPDATE_URL": update_url
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
Example #16
def main(output, input_file, *defines):
    pp = Preprocessor()
    pp.context.update({
        "FFI_EXEC_TRAMPOLINE_TABLE": "0",
        "HAVE_LONG_DOUBLE": "0",
        "TARGET": buildconfig.substs["FFI_TARGET"],
        "VERSION": "",
    })
    for d in defines:
        pp.context.update({d: "1"})
    pp.do_filter("substitution")
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
Example #17
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    substs = buildconfig.substs

    # Substs taken verbatim.
    substs_vars = (
        'BIN_SUFFIX',
    )
    for var in substs_vars:
        pp.context[var] = '"%s"' % substs[var]

    # Derived values.
    for key, condition in (
            ('IS_MAC', substs['OS_ARCH'] == 'Darwin'),
            ('IS_LINUX', substs['OS_ARCH'] == 'Linux'),
            ('IS_TEST_BUILD', substs.get('ENABLE_TESTS') == '1'),
            ('IS_DEBUG_BUILD', substs.get('MOZ_DEBUG') == '1'),
            ('CRASHREPORTER', substs.get('MOZ_CRASHREPORTER')),
            ('IS_ASAN', substs.get('MOZ_ASAN'))):
        if condition:
            pp.context[key] = '1'
        else:
            pp.context[key] = '0'

    pp.context.update({
        'XPC_BIN_PATH': '"%s/dist/bin"' % buildconfig.topobjdir,
        'CERTS_SRC_DIR': '"%s/build/pgo/certs"' % buildconfig.topsrcdir,
    })

    pp.out = output
    pp.do_include(input_file)
Example #18
def main():
    parser = argparse.ArgumentParser(description='Find duplicate files in directory.')
    parser.add_argument('--warning', '-w', action='store_true',
                        help='Only warn about duplicates, do not exit with an error')
    parser.add_argument('--file', '-f', action='append', dest='dupes_files', default=[],
                        help='Add exceptions to the duplicate list from this file')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    parser.add_argument('directory',
                        help='The directory to check for duplicates in')

    args = parser.parse_args()

    allowed_dupes = []
    for filename in args.dupes_files:
        pp = Preprocessor()
        pp.context.update(buildconfig.defines['ALLDEFINES'])
        if args.D:
            pp.context.update(args.D)
        for undefine in args.U:
            if undefine in pp.context:
                del pp.context[undefine]
        pp.out = StringIO()
        pp.do_filter('substitution')
        pp.do_include(filename)
        allowed_dupes.extend([line.partition('#')[0].rstrip()
                              for line in pp.out.getvalue().splitlines()])

    find_dupes(args.directory, bail=not args.warning, allowed_dupes=allowed_dupes)
Example #19
def preprocess(path, defines):
    pp = Preprocessor(defines=defines, marker="%")
    pp.context.update(defines)
    pp.out = io.StringIO()
    pp.do_filter("substitution")
    pp.do_include(io.open(path, "r", encoding="latin1"))
    pp.out.seek(0)
    return pp.out
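A hedged usage sketch: preprocess() above returns a rewound io.StringIO, so a caller can read the substituted text straight back. The template path and defines are illustrative only.

defines = {'AppName': 'Firefox', 'AppVersion': '1.0'}  # placeholder defines
out = preprocess('installer.nsi.in', defines)          # placeholder template
print(out.read())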
Example #20
def preprocess(path, defines):
    pp = Preprocessor(defines=defines, marker='%')
    pp.context.update(defines)
    pp.out = io.StringIO()
    pp.do_filter('substitution')
    pp.do_include(io.open(path, 'r', encoding='latin1'))
    pp.out.seek(0)
    return pp.out
Example #21
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'FFI_EXEC_TRAMPOLINE_TABLE': '0',
        'HAVE_LONG_DOUBLE': '0',
        'TARGET': buildconfig.substs['FFI_TARGET'],
        'VERSION': '',
    })
    pp.do_filter('substitution')
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
Example #22
def preprocess(input, parser, defines={}):
    '''
    Preprocess the file-like input with the given defines, and send the
    preprocessed output line by line to the given parser.
    '''
    pp = Preprocessor()
    pp.context.update(defines)
    pp.do_filter('substitution')
    pp.out = PreprocessorOutputWrapper(pp, parser)
    pp.do_include(input)
Example #23
def main(output, input_file):
    with open(input_file) as fh:
        if buildconfig.substs['EXPAND_LIBS_LIST_STYLE'] == 'linkerscript':
            def cleanup(line):
                assert line.startswith('INPUT("')
                assert line.endswith('")')
                return line[len('INPUT("'):-len('")')]

            objs = [cleanup(l.strip()) for l in fh.readlines()]
        else:
            objs = [l.strip() for l in fh.readlines()]

    pp = Preprocessor()
    pp.out = StringIO()
    pp.do_include(os.path.join(buildconfig.topobjdir, 'buildid.h'))
    buildid = pp.context['MOZ_BUILDID']
    output.write(
        'extern const char gToolkitBuildID[] = "%s";' % buildid
    )
    return set(o for o in objs
               if os.path.splitext(os.path.basename(o))[0] != 'buildid')
Example #24
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update(
        {
            "FFI_EXEC_TRAMPOLINE_TABLE": "0",
            "HAVE_LONG_DOUBLE": "0",
            "TARGET": buildconfig.substs["FFI_TARGET"],
            "VERSION": "",
        }
    )
    pp.do_filter("substitution")
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
Example #25
class TestLineEndings(unittest.TestCase):
    """
    Unit tests for the Context class
    """
    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
        self.f = NamedTemporaryFile(mode='wb')

    def tearDown(self):
        self.f.close()

    def createFile(self, lineendings):
        for line, ending in zip([b'a', b'#literal b', b'c'], lineendings):
            self.f.write(line + ending)
        self.f.flush()

    def testMac(self):
        self.createFile([b'\x0D'] * 3)
        self.pp.do_include(self.f.name)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

    def testUnix(self):
        self.createFile([b'\x0A'] * 3)
        self.pp.do_include(self.f.name)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

    def testWindows(self):
        self.createFile([b'\x0D\x0A'] * 3)
        self.pp.do_include(self.f.name)
        self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
Example #26
class TestLineEndings(unittest.TestCase):
  """
  Unit tests for the Context class
  """

  def setUp(self):
    self.pp = Preprocessor()
    self.pp.out = StringIO()
    self.tempnam = os.tempnam('.')

  def tearDown(self):
    os.remove(self.tempnam)

  def createFile(self, lineendings):
    f = open(self.tempnam, 'wb')
    for line, ending in zip(['a', '#literal b', 'c'], lineendings):
      f.write(line+ending)
    f.close()

  def testMac(self):
    self.createFile(['\x0D']*3)
    self.pp.do_include(self.tempnam)
    self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

  def testUnix(self):
    self.createFile(['\x0A']*3)
    self.pp.do_include(self.tempnam)
    self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')

  def testWindows(self):
    self.createFile(['\x0D\x0A']*3)
    self.pp.do_include(self.tempnam)
    self.assertEquals(self.pp.out.getvalue(), 'a\nb\nc\n')
Example #27
File: jar.py  Project: paulmadore/luckyde
    def __init__(self, outputFormat="flat", useJarfileManifest=True, useChromeManifest=False):

        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        self.localedirs = None
        self.l10nbase = None
        self.l10nmerge = None
        self.relativesrcdir = None
        self.rootManifestAppId = None
Example #28
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'

    pp.out = BytesIO()
    pp.do_filter('substitution')
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        code = generate_code(pref_list)
    except (IOError, ValueError) as e:
        print('{}: error:\n  {}\n'.format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code['static_pref_list_all_h'])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite('StaticPrefsAll.h') as fd:
        fd.write(code['static_prefs_all_h'])

    for group, text in sorted(code['static_pref_list_group_h'].items()):
        filename = 'StaticPrefList_{}.h'.format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code['static_prefs_group_h'].items()):
        filename = 'StaticPrefs_{}.h'.format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname,
                                     'StaticPrefsCGetters.cpp')) as fd:
        fd.write(code['static_prefs_c_getters_cpp'])

    with FileAvoidWrite('static_prefs.rs') as fd:
        fd.write(code['static_prefs_rs'])
Example #29
    def inputs(self):
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with open(self.path, 'rU') as input:
            with open(os.devnull, 'w') as output:
                pp.processFile(input=input, output=output)

        # This always yields at least self.path.
        return pp.includes
Example #30
def preprocess(input, parser, defines={}):
    '''
    Preprocess the file-like input with the given defines, and send the
    preprocessed output line by line to the given parser.
    '''
    pp = Preprocessor()
    pp.context.update(defines)
    pp.do_filter('substitution')
    pp.out = PreprocessorOutputWrapper(pp, parser)
    pp.do_include(input)
Example #31
    def do_if(self, *args, **kwargs):
        # The C preprocessor handles numbers following C rules, which is a
        # different handling than what our Preprocessor does out of the box.
        # Hack around it enough that the configure tests work properly.
        context = self.context

        def normalize_numbers(value):
            if isinstance(value, types.StringTypes):
                if value[-1:] == 'L' and value[:-1].isdigit():
                    value = int(value[:-1])
            return value

        self.context = self.Context(
            (k, normalize_numbers(v)) for k, v in context.iteritems()
        )
        try:
            return Preprocessor.do_if(self, *args, **kwargs)
        finally:
            self.context = context
Example #32
    def _get_preprocessor(self, output, extra):
        '''Returns a preprocessor for use by create_config_file and
        create_makefile.
        '''
        path = output.name
        pp = Preprocessor()
        pp.context.update(self.substs)
        pp.context.update(top_srcdir=self.get_top_srcdir(path))
        pp.context.update(srcdir=self.get_file_srcdir(path))
        pp.context.update(relativesrcdir=self.get_relative_srcdir(path))
        pp.context.update(DEPTH=self.get_depth(path))
        if extra:
            pp.context.update(extra)
        pp.do_filter('attemptSubstitution')
        pp.setMarker(None)

        pp.out = output
        return pp
Example #33
def main():
    parser = argparse.ArgumentParser(
        description="Find duplicate files in directory.")
    parser.add_argument(
        "--warning",
        "-w",
        action="store_true",
        help="Only warn about duplicates, do not exit with an error",
    )
    parser.add_argument(
        "--file",
        "-f",
        action="append",
        dest="dupes_files",
        default=[],
        help="Add exceptions to the duplicate list from this file",
    )
    parser.add_argument("-D", action=DefinesAction)
    parser.add_argument("-U", action="append", default=[])
    parser.add_argument("directory",
                        help="The directory to check for duplicates in")

    args = parser.parse_args()

    allowed_dupes = []
    for filename in args.dupes_files:
        pp = Preprocessor()
        pp.context.update(buildconfig.defines["ALLDEFINES"])
        if args.D:
            pp.context.update(args.D)
        for undefine in args.U:
            if undefine in pp.context:
                del pp.context[undefine]
        pp.out = StringIO()
        pp.do_filter("substitution")
        pp.do_include(filename)
        allowed_dupes.extend([
            line.partition("#")[0].rstrip()
            for line in pp.out.getvalue().splitlines()
        ])

    find_dupes(args.directory,
               bail=not args.warning,
               allowed_dupes=allowed_dupes)
Example #34
def process_package_overload(src, dst, version, app_buildid):
    ensureParentDir(dst)
    # First replace numeric version like '1.3'
    # Then replace with 'slashed' version like '1_4'
    # Finally set the full length addon version like 1.3.20131230
    defines = {
        "NUM_VERSION": version,
        "SLASH_VERSION": version.replace(".", "_"),
        "FULL_VERSION": ("%s.%s" % (version, app_buildid))
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
Example #35
def preprocess_file(src, dst, version, app_buildid, update_url):
    ensureParentDir(dst)

    defines = {
        "ADDON_ID": "fxos_" + version.replace(".", "_") + "*****@*****.**",
        # (reduce the app build id to only the build date
        # as addon manager doesn't handle big ints in addon versions)
        "ADDON_VERSION": ("%s.%s" % (version, app_buildid[:8])),
        "ADDON_NAME": "Firefox OS " + version + " Simulator",
        "ADDON_DESCRIPTION": "a Firefox OS " + version + " simulator",
        "ADDON_UPDATE_URL": update_url
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
Example #36
    def do_if(self, expression, **kwargs):
        # The C preprocessor handles numbers following C rules, which is a
        # different handling than what our Preprocessor does out of the box.
        # Hack around it enough that the configure tests work properly.
        context = self.context

        def normalize_numbers(value):
            if isinstance(value, types.StringTypes):
                if value[-1:] == 'L' and value[:-1].isdigit():
                    value = int(value[:-1])
            return value

        # Our Preprocessor doesn't handle macros with parameters, so we hack
        # around that for __has_feature()-like things.
        def normalize_has_feature(expr):
            return self.HAS_FEATURE.sub(r'\1\2', expr)

        self.context = self.Context(
            (normalize_has_feature(k), normalize_numbers(v))
            for k, v in context.iteritems()
        )
        try:
            return Preprocessor.do_if(self, normalize_has_feature(expression),
                                      **kwargs)
        finally:
            self.context = context
Example #37
    def _get_preprocessor(self, output, extra):
        '''Returns a preprocessor for use by create_config_file and
        create_makefile.
        '''
        path = output.name
        pp = Preprocessor()
        pp.context.update(self.substs)
        pp.context.update(top_srcdir = self.get_top_srcdir(path))
        pp.context.update(srcdir = self.get_file_srcdir(path))
        pp.context.update(relativesrcdir = self.get_relative_srcdir(path))
        pp.context.update(DEPTH = self.get_depth(path))
        if extra:
            pp.context.update(extra)
        pp.do_filter('attemptSubstitution')
        pp.setMarker(None)

        pp.out = output
        return pp
Example #38
def load_yaml(yaml_path):
    # First invoke preprocessor.py so that we can use #ifdef JS_SIMULATOR in
    # the YAML file.
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])
    pp.out = six.StringIO()
    pp.do_filter('substitution')
    pp.do_include(yaml_path)
    contents = pp.out.getvalue()

    # Load into an OrderedDict to ensure order is preserved. Note: Python 3.7+
    # also preserves ordering for normal dictionaries.
    # Code based on https://stackoverflow.com/a/21912744.
    class OrderedLoader(yaml.Loader):
        pass

    def construct_mapping(loader, node):
        loader.flatten_mapping(node)
        return OrderedDict(loader.construct_pairs(node))

    tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
    OrderedLoader.add_constructor(tag, construct_mapping)
    return yaml.load(contents, OrderedLoader)
Example #39
    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
Example #40
  def setUp(self):
    self.pp = Preprocessor()
    self.pp.out = StringIO()
    self.tempnam = os.tempnam('.')
Example #41
def rnp_preprocess(tmpl, dest, defines):
    """
    Generic preprocessing
    :param BinaryIO tmpl: open filehandle (read) input
    :param BinaryIO dest: open filehandle (write) output
    :param dict defines: result of get_defines()
    :return boolean:
    """
    pp = Preprocessor()
    pp.setMarker("%")
    pp.addDefines(defines)
    pp.do_filter("substitution")
    pp.out = dest
    pp.do_include(tmpl, True)
    return True
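A hedged example of driving rnp_preprocess(); the file names and the define are placeholders rather than the real rnp build inputs.

defines = {'PACKAGE_VERSION': '0.16.2'}  # illustrative define only
with open('version.h.in') as tmpl, open('version.h', 'w') as dest:
    rnp_preprocess(tmpl, dest, defines)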
Example #42
class TestPreprocessor(unittest.TestCase):
    """
    Unit tests for the Context class
    """
    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()

    def do_include_compare(self, content_lines, expected_lines):
        content = '%s' % '\n'.join(content_lines)
        expected = '%s'.rstrip() % '\n'.join(expected_lines)

        with MockedOpen({'dummy': content}):
            self.pp.do_include('dummy')
            self.assertEqual(self.pp.out.getvalue().rstrip('\n'), expected)

    def do_include_pass(self, content_lines):
        self.do_include_compare(content_lines, ['PASS'])

    def test_conditional_if_0(self):
        self.do_include_pass([
            '#if 0',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_no_marker(self):
        lines = [
            '#if 0',
            'PASS',
            '#endif',
        ]
        self.pp.setMarker(None)
        self.do_include_compare(lines, lines)

    def test_string_value(self):
        self.do_include_compare([
            '#define FOO STRING',
            '#if FOO',
            'string value is true',
            '#else',
            'string value is false',
            '#endif',
        ], ['string value is false'])

    def test_number_value(self):
        self.do_include_compare([
            '#define FOO 1',
            '#if FOO',
            'number value is true',
            '#else',
            'number value is false',
            '#endif',
        ], ['number value is true'])

    def test_conditional_if_0_elif_1(self):
        self.do_include_pass([
            '#if 0',
            '#elif 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_1(self):
        self.do_include_pass([
            '#if 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_0_or_1(self):
        self.do_include_pass([
            '#if 0 || 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_1_elif_1_else(self):
        self.do_include_pass([
            '#if 1',
            'PASS',
            '#elif 1',
            'FAIL',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_1_if_1(self):
        self.do_include_pass([
            '#if 1',
            '#if 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_not_0(self):
        self.do_include_pass([
            '#if !0',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_not_0_and_1(self):
        self.do_include_pass([
            '#if !0 && !1',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_conditional_not_1(self):
        self.do_include_pass([
            '#if !1',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_conditional_not_emptyval(self):
        self.do_include_compare([
            '#define EMPTYVAL',
            '#ifndef EMPTYVAL',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
            '#ifdef EMPTYVAL',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ], ['PASS', 'PASS'])

    def test_conditional_not_nullval(self):
        self.do_include_pass([
            '#define NULLVAL 0',
            '#if !NULLVAL',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_expand(self):
        self.do_include_pass([
            '#define ASVAR AS',
            '#expand P__ASVAR__S',
        ])

    def test_undef_defined(self):
        self.do_include_compare([
            '#define BAR',
            '#undef BAR',
            'BAR',
        ], ['BAR'])

    def test_undef_undefined(self):
        self.do_include_compare([
            '#undef BAR',
        ], [])

    def test_filter_attemptSubstitution(self):
        self.do_include_compare([
            '#filter attemptSubstitution',
            '@PASS@',
            '#unfilter attemptSubstitution',
        ], ['@PASS@'])

    def test_filter_emptyLines(self):
        self.do_include_compare([
            'lines with a',
            '',
            'blank line',
            '#filter emptyLines',
            'lines with',
            '',
            'no blank lines',
            '#unfilter emptyLines',
            'yet more lines with',
            '',
            'blank lines',
        ], [
            'lines with a',
            '',
            'blank line',
            'lines with',
            'no blank lines',
            'yet more lines with',
            '',
            'blank lines',
        ])

    def test_filter_slashslash(self):
        self.do_include_compare([
            '#filter slashslash',
            'PASS//FAIL  // FAIL',
            '#unfilter slashslash',
            'PASS // PASS',
        ], [
            'PASS',
            'PASS // PASS',
        ])

    def test_filter_spaces(self):
        self.do_include_compare([
            '#filter spaces',
            'You should see two nice ascii tables',
            ' +-+-+-+',
            ' | |   |     |',
            ' +-+-+-+',
            '#unfilter spaces',
            '+-+---+',
            '| |   |',
            '+-+---+',
        ], [
            'You should see two nice ascii tables',
            '+-+-+-+',
            '| | | |',
            '+-+-+-+',
            '+-+---+',
            '| |   |',
            '+-+---+',
        ])

    def test_filter_substitution(self):
        self.do_include_pass([
            '#define VAR ASS',
            '#filter substitution',
            'P@VAR@',
            '#unfilter substitution',
        ])

    def test_error(self):
        with MockedOpen({'f': '#error spit this message out\n'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
                self.assertEqual(e.args[0][-1], 'spit this message out')

    def test_javascript_line(self):
        # The preprocessor is reading the filename from somewhere not caught
        # by MockedOpen.
        tmpdir = mkdtemp()
        try:
            full = os.path.join(tmpdir, 'javascript_line.js.in')
            with open(full, 'w') as fh:
                fh.write('\n'.join([
                    '// Line 1',
                    '#if 0',
                    '// line 3',
                    '#endif',
                    '// line 5',
                    '# comment',
                    '// line 7',
                    '// line 8',
                    '// line 9',
                    '# another comment',
                    '// line 11',
                    '#define LINE 1',
                    '// line 13, given line number overwritten with 2',
                    '',
                ]))

            self.pp.do_include(full)
            out = '\n'.join([
                '// Line 1',
                '//@line 5 "CWDjavascript_line.js.in"',
                '// line 5',
                '//@line 7 "CWDjavascript_line.js.in"',
                '// line 7',
                '// line 8',
                '// line 9',
                '//@line 11 "CWDjavascript_line.js.in"',
                '// line 11',
                '//@line 2 "CWDjavascript_line.js.in"',
                '// line 13, given line number overwritten with 2',
                '',
            ])
            out = out.replace('CWD', tmpdir + os.path.sep)
            self.assertEqual(self.pp.out.getvalue(), out)
        finally:
            shutil.rmtree(tmpdir)

    def test_literal(self):
        self.do_include_pass([
            '#literal PASS',
        ])

    def test_var_directory(self):
        self.do_include_pass([
            '#ifdef DIRECTORY',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_file(self):
        self.do_include_pass([
            '#ifdef FILE',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_if_0(self):
        self.do_include_pass([
            '#define VAR 0',
            '#if VAR',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_if_0_elifdef(self):
        self.do_include_pass([
            '#if 0',
            '#elifdef FILE',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_if_0_elifndef(self):
        self.do_include_pass([
            '#if 0',
            '#elifndef VAR',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_ifdef_0(self):
        self.do_include_pass([
            '#define VAR 0',
            '#ifdef VAR',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_ifdef_1_or_undef(self):
        self.do_include_pass([
            '#define FOO 1',
            '#if defined(FOO) || defined(BAR)',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_ifdef_undef(self):
        self.do_include_pass([
            '#define VAR 0',
            '#undef VAR',
            '#ifdef VAR',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_ifndef_0(self):
        self.do_include_pass([
            '#define VAR 0',
            '#ifndef VAR',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_ifndef_0_and_undef(self):
        self.do_include_pass([
            '#define FOO 0',
            '#if !defined(FOO) && !defined(BAR)',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_ifndef_undef(self):
        self.do_include_pass([
            '#define VAR 0',
            '#undef VAR',
            '#ifndef VAR',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_line(self):
        self.do_include_pass([
            '#ifdef LINE',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_filterDefine(self):
        self.do_include_pass([
            '#filter substitution',
            '#define VAR AS',
            '#define VAR2 P@VAR@',
            '@VAR2@S',
        ])

    def test_number_value_equals(self):
        self.do_include_pass([
            '#define FOO 1000',
            '#if FOO == 1000',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_default_defines(self):
        self.pp.handleCommandLine(["-DFOO"])
        self.do_include_pass([
            '#if FOO == 1',
            'PASS',
            '#else',
            'FAIL',
        ])

    def test_number_value_equals_defines(self):
        self.pp.handleCommandLine(["-DFOO=1000"])
        self.do_include_pass([
            '#if FOO == 1000',
            'PASS',
            '#else',
            'FAIL',
        ])

    def test_octal_value_equals(self):
        self.do_include_pass([
            '#define FOO 0100',
            '#if FOO == 0100',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_octal_value_equals_defines(self):
        self.pp.handleCommandLine(["-DFOO=0100"])
        self.do_include_pass([
            '#if FOO == 0100',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_value_quoted_expansion(self):
        """
        Quoted values on the commandline don't currently have quotes stripped.
        Pike says this is for compat reasons.
        """
        self.pp.handleCommandLine(['-DFOO="ABCD"'])
        self.do_include_compare([
            '#filter substitution',
            '@FOO@',
        ], ['"ABCD"'])

    def test_octal_value_quoted_expansion(self):
        self.pp.handleCommandLine(['-DFOO="0100"'])
        self.do_include_compare([
            '#filter substitution',
            '@FOO@',
        ], ['"0100"'])

    def test_number_value_not_equals_quoted_defines(self):
        self.pp.handleCommandLine(['-DFOO="1000"'])
        self.do_include_pass([
            '#if FOO == 1000',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_octal_value_not_equals_quoted_defines(self):
        self.pp.handleCommandLine(['-DFOO="0100"'])
        self.do_include_pass([
            '#if FOO == 0100',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_undefined_variable(self):
        with MockedOpen({'f': '#filter substitution\n@foo@'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
                self.assertEqual(e.key, 'UNDEFINED_VAR')

    def test_include(self):
        files = {
            'foo/test':
            '\n'.join([
                '#define foo foobarbaz',
                '#include @inc@',
                '@bar@',
                '',
            ]),
            'bar':
            '\n'.join([
                '#define bar barfoobaz',
                '@foo@',
                '',
            ]),
            'f':
            '\n'.join([
                '#filter substitution',
                '#define inc ../bar',
                '#include foo/test',
                '',
            ]),
        }

        with MockedOpen(files):
            self.pp.do_include('f')
            self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\nbarfoobaz\n')

    def test_include_line(self):
        files = {
            'srcdir/test.js':
            '\n'.join([
                '#define foo foobarbaz',
                '#include @inc@',
                '@bar@',
                '',
            ]),
            'srcdir/bar.js':
            '\n'.join([
                '#define bar barfoobaz',
                '@foo@',
                '',
            ]),
            'srcdir/foo.js':
            '\n'.join([
                'bazfoobar',
                '#include bar.js',
                'bazbarfoo',
                '',
            ]),
            'objdir/baz.js':
            'baz\n',
            'srcdir/f.js':
            '\n'.join([
                '#include foo.js',
                '#filter substitution',
                '#define inc bar.js',
                '#include test.js',
                '#include ../objdir/baz.js',
                'fin',
                '',
            ]),
        }

        preprocessed = ('//@line 1 "$SRCDIR/foo.js"\n'
                        'bazfoobar\n'
                        '//@line 2 "$SRCDIR/bar.js"\n'
                        '@foo@\n'
                        '//@line 3 "$SRCDIR/foo.js"\n'
                        'bazbarfoo\n'
                        '//@line 2 "$SRCDIR/bar.js"\n'
                        'foobarbaz\n'
                        '//@line 3 "$SRCDIR/test.js"\n'
                        'barfoobaz\n'
                        '//@line 1 "$OBJDIR/baz.js"\n'
                        'baz\n'
                        '//@line 6 "$SRCDIR/f.js"\n'
                        'fin\n').replace('DIR/', 'DIR' + os.sep)

        # Try with separate srcdir/objdir
        with MockedOpen(files):
            self.pp.topsrcdir = os.path.abspath('srcdir')
            self.pp.topobjdir = os.path.abspath('objdir')
            self.pp.do_include('srcdir/f.js')
            self.assertEqual(self.pp.out.getvalue(), preprocessed)

        # Try again with relative objdir
        self.setUp()
        files['srcdir/objdir/baz.js'] = files['objdir/baz.js']
        del files['objdir/baz.js']
        files['srcdir/f.js'] = files['srcdir/f.js'].replace('../', '')
        with MockedOpen(files):
            self.pp.topsrcdir = os.path.abspath('srcdir')
            self.pp.topobjdir = os.path.abspath('srcdir/objdir')
            self.pp.do_include('srcdir/f.js')
            self.assertEqual(self.pp.out.getvalue(), preprocessed)

    def test_include_missing_file(self):
        with MockedOpen({'f': '#include foo\n'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
            self.assertEqual(e.exception.key, 'FILE_NOT_FOUND')

    def test_include_undefined_variable(self):
        with MockedOpen({'f': '#filter substitution\n#include @foo@\n'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
            self.assertEqual(e.exception.key, 'UNDEFINED_VAR')

    def test_include_literal_at(self):
        files = {
            '@foo@': '#define foo foobarbaz\n',
            'f': '#include @foo@\n#filter substitution\n@foo@\n',
        }

        with MockedOpen(files):
            self.pp.do_include('f')
            self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')

    def test_command_line_literal_at(self):
        with MockedOpen({"@[email protected]": '@foo@\n'}):
            self.pp.handleCommandLine(
                ['-Fsubstitution', '-Dfoo=foobarbaz', '@[email protected]'])
            self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')

    def test_invalid_ifdef(self):
        with MockedOpen({'dummy': '#ifdef FOO == BAR\nPASS\n#endif'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('dummy')
            self.assertEqual(e.exception.key, 'INVALID_VAR')

        with MockedOpen({'dummy': '#ifndef FOO == BAR\nPASS\n#endif'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('dummy')
            self.assertEqual(e.exception.key, 'INVALID_VAR')

        # Trailing whitespaces, while not nice, shouldn't be an error.
        self.do_include_pass([
            '#ifndef  FOO ',
            'PASS',
            '#endif',
        ])
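The tests above are ordinary unittest cases; a hedged way to run them standalone (the in-tree harness may differ) is the stock runner, assuming the defining module already imports unittest for TestCase.

if __name__ == '__main__':
    unittest.main()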
Example #43
    def _consume_jar_manifest(self, obj, defines):
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - L10n support
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        pp.context.update(defines)
        pp.context.update(self.environment.defines)
        pp.context.update(
            AB_CD='en-US',
            BUILD_FASTER=1,
        )
        pp.out = JarManifestParser()
        pp.do_include(obj.path)
        self.backend_input_files |= pp.includes

        for jarinfo in pp.out:
            install_target = obj.install_target
            if jarinfo.base:
                install_target = mozpath.normpath(
                    mozpath.join(install_target, jarinfo.base))
            for e in jarinfo.entries:
                if e.is_locale:
                    if jarinfo.relativesrcdir:
                        path = mozpath.join(self.environment.topsrcdir,
                                            jarinfo.relativesrcdir)
                    else:
                        path = mozpath.dirname(obj.path)
                    src = mozpath.join(path, 'en-US', e.source)
                elif e.source.startswith('/'):
                    src = mozpath.join(self.environment.topsrcdir,
                                       e.source[1:])
                else:
                    src = mozpath.join(mozpath.dirname(obj.path), e.source)

                if '*' in e.source:
                    if e.preprocess:
                        raise Exception('%s: Wildcards are not supported with '
                                        'preprocessing' % obj.path)
                    def _prefix(s):
                        for p in s.split('/'):
                            if '*' not in p:
                                yield p + '/'
                    prefix = ''.join(_prefix(src))

                    self._install_manifests[install_target] \
                        .add_pattern_symlink(
                        prefix,
                        src[len(prefix):],
                        mozpath.join(jarinfo.name, e.output))
                    continue

                if not os.path.exists(src):
                    if e.is_locale:
                        raise Exception(
                            '%s: Cannot find %s' % (obj.path, e.source))
                    if e.source.startswith('/'):
                        src = mozpath.join(self.environment.topobjdir,
                                           e.source[1:])
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = mozpath.join(obj.objdir, e.source)
                    self._dependencies['install-%s' % install_target] \
                        .append(mozpath.relpath(
                        src, self.environment.topobjdir))

                if e.preprocess:
                    kwargs = {}
                    if src.endswith('.css'):
                        kwargs['marker'] = '%'
                    self._add_preprocess(
                        obj,
                        src,
                        mozpath.join(jarinfo.name, mozpath.dirname(e.output)),
                        mozpath.basename(e.output),
                        defines=defines,
                        **kwargs)
                else:
                    self._install_manifests[install_target].add_symlink(
                        src,
                        mozpath.join(jarinfo.name, e.output))

            manifest = mozpath.normpath(mozpath.join(install_target,
                                                     jarinfo.name))
            manifest += '.manifest'
            for m in jarinfo.chrome_manifests:
                self._manifest_entries[manifest].add(
                    m.replace('%', mozpath.basename(jarinfo.name) + '/'))

            if jarinfo.name != 'chrome':
                manifest = mozpath.normpath(mozpath.join(install_target,
                                                         'chrome.manifest'))
                entry = 'manifest %s.manifest' % jarinfo.name
                self._manifest_entries[manifest].add(entry)
Example #44
         STYLE_CONSTRUCTOR=1,
         TINT_FOREGROUND_DRAWABLE=1,
         BOOKMARK_NO_TINT=1),
    dict(VIEW_NAME_SUFFIX='ImageView',
         BASE_TYPE='android.widget.ImageView',
         STYLE_CONSTRUCTOR=1,
         TINT_FOREGROUND_DRAWABLE=1),
    dict(VIEW_NAME_SUFFIX='LinearLayout',
         BASE_TYPE='android.widget.LinearLayout'),
    dict(VIEW_NAME_SUFFIX='RelativeLayout',
         BASE_TYPE='android.widget.RelativeLayout',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='TextSwitcher',
         BASE_TYPE='android.widget.TextSwitcher'),
    dict(VIEW_NAME_SUFFIX='TextView',
         BASE_TYPE='android.widget.TextView',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='View',
         BASE_TYPE='android.view.View',
         STYLE_CONSTRUCTOR=1),
]

for view in views:
    pp = Preprocessor(defines=view, marker='//#')

    dest = os.path.join(__DIR__, dest_format_string % view)
    with open(template, 'rU') as input:
        with open(dest, 'wt') as output:
            pp.processFile(input=input, output=output)
            print('%s' % dest)
Example #45
File: jar.py  Project: luke-chang/gecko-1
class JarMaker(object):
    '''JarMaker reads jar.mn files and process those into jar files or
      flat directories, along with chrome.manifest files.
      '''

    def __init__(self, outputFormat='flat', useJarfileManifest=True,
        useChromeManifest=False):

        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        self.localedirs = None
        self.l10nbase = None
        self.l10nmerge = None
        self.relativesrcdir = None
        self.rootManifestAppId = None
        self._seen_output = set()

    def getCommandLineParser(self):
        '''Get a optparse.OptionParser for jarmaker.

        This OptionParser has the options for jarmaker as well as
        the options for the inner PreProcessor.
        '''

        # HACK, we need to unescape the string variables we get,
        # the perl versions didn't grok strings right

        p = self.pp.getCommandLineParser(unescapeDefines=True)
        p.add_option('-f', type='choice', default='jar',
            choices=('jar', 'flat', 'symlink'),
            help='fileformat used for output',
            metavar='[jar, flat, symlink]',
            )
        p.add_option('-v', action='store_true', dest='verbose',
                     help='verbose output')
        p.add_option('-q', action='store_false', dest='verbose',
                     help='verbose output')
        p.add_option('-e', action='store_true',
                     help='create chrome.manifest instead of jarfile.manifest'
                     )
        p.add_option('-s', type='string', action='append', default=[],
                     help='source directory')
        p.add_option('-t', type='string', help='top source directory')
        p.add_option('-c', '--l10n-src', type='string', action='append'
                     , help='localization directory')
        p.add_option('--l10n-base', type='string', action='store',
                     help='base directory to be used for localization (requires relativesrcdir)'
                     )
        p.add_option('--locale-mergedir', type='string', action='store'
                     ,
                     help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
                     )
        p.add_option('--relativesrcdir', type='string',
                     help='relativesrcdir to be used for localization')
        p.add_option('-d', type='string', help='base directory')
        p.add_option('--root-manifest-entry-appid', type='string',
                     help='add an app id specific root chrome manifest entry.'
                     )
        return p

    def finalizeJar(self, jardir, jarbase, jarname, chromebasepath, register, doZip=True):
        '''Helper method to write out the chrome registration entries to
         jarfile.manifest or chrome.manifest, or both.

        The actual file processing is done in updateManifest.
        '''

        # rewrite the manifest, if entries given
        if not register:
            return

        chromeManifest = os.path.join(jardir, jarbase, 'chrome.manifest')

        if self.useJarfileManifest:
            self.updateManifest(os.path.join(jardir, jarbase,
                                             jarname + '.manifest'),
                                chromebasepath.format(''), register)
            if jarname != 'chrome':
                addEntriesToListFile(chromeManifest,
                                     ['manifest {0}.manifest'.format(jarname)])
        if self.useChromeManifest:
            chromebase = os.path.dirname(jarname) + '/'
            self.updateManifest(chromeManifest,
                                chromebasepath.format(chromebase), register)

        # If requested, add a root chrome manifest entry (assumed to be in the parent directory
        # of chromeManifest) with the application specific id. In cases where we're building
        # lang packs, the root manifest must know about application sub directories.

        if self.rootManifestAppId:
            rootChromeManifest = \
                os.path.join(os.path.normpath(os.path.dirname(chromeManifest)),
                             '..', 'chrome.manifest')
            rootChromeManifest = os.path.normpath(rootChromeManifest)
            chromeDir = \
                os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
            logging.info("adding '%s' entry to root chrome manifest appid=%s"
                          % (chromeDir, self.rootManifestAppId))
            addEntriesToListFile(rootChromeManifest,
                                 ['manifest %s/chrome.manifest application=%s'
                                  % (chromeDir,
                                 self.rootManifestAppId)])

    def updateManifest(self, manifestPath, chromebasepath, register):
        '''updateManifest replaces the % in the chrome registration entries
        with the given chrome base path, and updates the given manifest file.
        '''
        myregister = dict.fromkeys(map(lambda s: s.replace('%',
            chromebasepath), register))
        addEntriesToListFile(manifestPath, myregister.iterkeys())

    def makeJar(self, infile, jardir):
        '''makeJar is the main entry point to JarMaker.

        It takes the input file, the output directory, the source dirs and the
        top source dir as argument, and optionally the l10n dirs.
        '''

        # making paths absolute, guess srcdir if file and add to sourcedirs
        _normpath = lambda p: os.path.normpath(os.path.abspath(p))
        self.topsourcedir = _normpath(self.topsourcedir)
        self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
        if self.localedirs:
            self.localedirs = [_normpath(p) for p in self.localedirs]
        elif self.relativesrcdir:
            self.localedirs = \
                self.generateLocaleDirs(self.relativesrcdir)
        if isinstance(infile, basestring):
            logging.info('processing ' + infile)
            self.sourcedirs.append(_normpath(os.path.dirname(infile)))
        pp = self.pp.clone()
        pp.out = JarManifestParser()
        pp.do_include(infile)

        for info in pp.out:
            self.processJarSection(info, jardir)

    def generateLocaleDirs(self, relativesrcdir):
        if os.path.basename(relativesrcdir) == 'locales':
            # strip locales
            l10nrelsrcdir = os.path.dirname(relativesrcdir)
        else:
            l10nrelsrcdir = relativesrcdir
        locdirs = []

        # generate locales dirs, merge, l10nbase, en-US
        if self.l10nmerge:
            locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir))
        if self.l10nbase:
            locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir))
        if self.l10nmerge or not self.l10nbase:
            # add en-US if we merge, or if it's not l10n
            locdirs.append(os.path.join(self.topsourcedir,
                           relativesrcdir, 'en-US'))
        return locdirs

    def processJarSection(self, jarinfo, jardir):
        '''Internal method called by makeJar to actually process a section
        of a jar.mn file.
        '''

        # chromebasepath is used for chrome registration manifests
        # {0} is getting replaced with chrome/ for chrome.manifest, and with
        # an empty string for jarfile.manifest

        chromebasepath = '{0}' + os.path.basename(jarinfo.name)
        if self.outputFormat == 'jar':
            chromebasepath = 'jar:' + chromebasepath + '.jar!'
        chromebasepath += '/'

        jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name)
        jf = None
        if self.outputFormat == 'jar':
            # jar
            jarfilepath = jarfile + '.jar'
            try:
                os.makedirs(os.path.dirname(jarfilepath))
            except OSError as error:
                if error.errno != errno.EEXIST:
                    raise
            jf = ZipFile(jarfilepath, 'a', lock=True)
            outHelper = self.OutputHelper_jar(jf)
        else:
            outHelper = getattr(self, 'OutputHelper_'
                                + self.outputFormat)(jarfile)

        if jarinfo.relativesrcdir:
            self.localedirs = self.generateLocaleDirs(jarinfo.relativesrcdir)

        for e in jarinfo.entries:
            self._processEntryLine(e, outHelper, jf)

        self.finalizeJar(jardir, jarinfo.base, jarinfo.name, chromebasepath,
                         jarinfo.chrome_manifests)
        if jf is not None:
            jf.close()

    def _processEntryLine(self, e, outHelper, jf):
        out = e.output
        src = e.source

        # pick the right sourcedir -- l10n, topsrc or src

        if e.is_locale:
            # If the file is a Fluent l10n resource, we want to skip the
            # 'en-US' fallbacking.
            #
            # To achieve that, we're testing if we have more than one localedir,
            # and if the last of those has 'en-US' in it.
            # If that's the case, we're removing the last one.
            if (e.source.endswith('.ftl') and
                len(self.localedirs) > 1 and
                'en-US' in self.localedirs[-1]):
                src_base = self.localedirs[:-1]
            else:
                src_base = self.localedirs
        elif src.startswith('/'):
            # path/in/jar/file_name.xul     (/path/in/sourcetree/file_name.xul)
            # refers to a path relative to topsourcedir, use that as base
            # and strip the leading '/'
            src_base = [self.topsourcedir]
            src = src[1:]
        else:
            # use srcdirs and the objdir (current working dir) for relative paths
            src_base = self.sourcedirs + [os.getcwd()]

        if '*' in src:
            def _prefix(s):
                for p in s.split('/'):
                    if '*' not in p:
                        yield p + '/'
            prefix = ''.join(_prefix(src))
            emitted = set()
            for _srcdir in src_base:
                finder = FileFinder(_srcdir)
                for path, _ in finder.find(src):
                    # If the path was already seen in one of the other source
                    # directories, skip it. That matches the non-wildcard case
                    # below, where we pick the first existing file.
                    reduced_path = path[len(prefix):]
                    if reduced_path in emitted:
                        continue
                    emitted.add(reduced_path)
                    e = JarManifestEntry(
                        mozpath.join(out, reduced_path),
                        path,
                        is_locale=e.is_locale,
                        preprocess=e.preprocess,
                    )
                    self._processEntryLine(e, outHelper, jf)
            return

        # check if the source file exists
        realsrc = None
        for _srcdir in src_base:
            if os.path.isfile(os.path.join(_srcdir, src)):
                realsrc = os.path.join(_srcdir, src)
                break
        if realsrc is None:
            if jf is not None:
                jf.close()
            raise RuntimeError('File "{0}" not found in {1}'.format(src,
                               ', '.join(src_base)))

        if out in self._seen_output:
            raise RuntimeError('%s already added' % out)
        self._seen_output.add(out)

        if e.preprocess:
            outf = outHelper.getOutput(out)
            inf = open(realsrc)
            pp = self.pp.clone()
            if src[-4:] == '.css':
                pp.setMarker('%')
            pp.out = outf
            pp.do_include(inf)
            pp.failUnused(realsrc)
            outf.close()
            inf.close()
            return

        # copy or symlink if newer

        if getModTime(realsrc) > outHelper.getDestModTime(e.output):
            if self.outputFormat == 'symlink':
                outHelper.symlink(realsrc, out)
                return
            outf = outHelper.getOutput(out)

            # open in binary mode, this can be images etc

            inf = open(realsrc, 'rb')
            outf.write(inf.read())
            outf.close()
            inf.close()

    class OutputHelper_jar(object):
        '''Provide getDestModTime and getOutput for a given jarfile.'''

        def __init__(self, jarfile):
            self.jarfile = jarfile

        def getDestModTime(self, aPath):
            try:
                info = self.jarfile.getinfo(aPath)
                return info.date_time
            except:
                return 0

        def getOutput(self, name):
            return ZipEntry(name, self.jarfile)

    class OutputHelper_flat(object):
        '''Provide getDestModTime and getOutput for a given flat
        output directory. The helper method ensureDirFor is used by
        the symlink subclass.
        '''

        def __init__(self, basepath):
            self.basepath = basepath

        def getDestModTime(self, aPath):
            return getModTime(os.path.join(self.basepath, aPath))

        def getOutput(self, name):
            out = self.ensureDirFor(name)

            # remove previous link or file
            try:
                os.remove(out)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
            return open(out, 'wb')

        def ensureDirFor(self, name):
            out = os.path.join(self.basepath, name)
            outdir = os.path.dirname(out)
            if not os.path.isdir(outdir):
                try:
                    os.makedirs(outdir)
                except OSError as error:
                    if error.errno != errno.EEXIST:
                        raise
            return out

    class OutputHelper_symlink(OutputHelper_flat):
        '''Subclass of OutputHelper_flat that provides a helper for
        creating a symlink including creating the parent directories.
        '''

        def symlink(self, src, dest):
            out = self.ensureDirFor(dest)

            # remove previous link or file
            try:
                os.remove(out)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
            if sys.platform != 'win32':
                os.symlink(src, out)
            else:
                # On Win32, use ctypes to create a hardlink
                rv = CreateHardLink(out, src, None)
                if rv == 0:
                    raise WinError()
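
A hedged driver sketch for the class above: wire the options from getCommandLineParser onto a JarMaker instance and call makeJar. All paths and option values are illustrative, and the real command-line wrapper does more (verbosity, locale handling, chrome manifest flags).

jm = JarMaker(outputFormat='flat', useJarfileManifest=True)
p = jm.getCommandLineParser()
options, args = p.parse_args([
    '-f', 'flat',                                # output format
    '-t', '/src/mozilla-central',                # top source directory
    '-s', '/src/mozilla-central/browser/base',   # extra source directory
    'jar.mn',
])

jm.outputFormat = options.f
jm.topsourcedir = options.t
jm.sourcedirs = options.s
jm.makeJar(infile=args[0], jardir='/obj/dist/bin/chrome')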
Example #46
class JarMaker(object):
    '''JarMaker reads jar.mn files and processes them into jar files or
      flat directories, along with chrome.manifest files.
      '''

    ignore = re.compile('\s*(\#.*)?$')
    jarline = re.compile('(?:(?P<jarfile>[\w\d.\-\_\\\/{}]+).jar\:)|(?:\s*(\#.*)?)\s*$')
    relsrcline = re.compile('relativesrcdir\s+(?P<relativesrcdir>.+?):')
    regline = re.compile('\%\s+(.*)$')
    entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
    entryline = re.compile(entryre
                           + '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$'
                           )

    def __init__(self, outputFormat='flat', useJarfileManifest=True,
        useChromeManifest=False):

        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        self.localedirs = None
        self.l10nbase = None
        self.l10nmerge = None
        self.relativesrcdir = None
        self.rootManifestAppId = None

    def getCommandLineParser(self):
        '''Get an optparse.OptionParser for jarmaker.

        This OptionParser has the options for jarmaker as well as
        the options for the inner PreProcessor.
        '''

        # HACK, we need to unescape the string variables we get,
        # the perl versions didn't grok strings right

        p = self.pp.getCommandLineParser(unescapeDefines=True)
        p.add_option('-f', type='choice', default='jar',
            choices=('jar', 'flat', 'symlink'),
            help='fileformat used for output',
            metavar='[jar, flat, symlink]',
            )
        p.add_option('-v', action='store_true', dest='verbose',
                     help='verbose output')
        p.add_option('-q', action='store_false', dest='verbose',
                     help='verbose output')
        p.add_option('-e', action='store_true',
                     help='create chrome.manifest instead of jarfile.manifest'
                     )
        p.add_option('-s', type='string', action='append', default=[],
                     help='source directory')
        p.add_option('-t', type='string', help='top source directory')
        p.add_option('-c', '--l10n-src', type='string', action='append'
                     , help='localization directory')
        p.add_option('--l10n-base', type='string', action='store',
                     help='base directory to be used for localization (requires relativesrcdir)'
                     )
        p.add_option('--locale-mergedir', type='string', action='store'
                     ,
                     help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
                     )
        p.add_option('--relativesrcdir', type='string',
                     help='relativesrcdir to be used for localization')
        p.add_option('-j', type='string', help='jarfile directory')
        p.add_option('--root-manifest-entry-appid', type='string',
                     help='add an app id specific root chrome manifest entry.'
                     )
        return p

    def processIncludes(self, includes):
        '''Process given includes with the inner PreProcessor.

        Only use this for #defines, the includes shouldn't generate
        content.
        '''

        self.pp.out = StringIO()
        for inc in includes:
            self.pp.do_include(inc)
        includesvalue = self.pp.out.getvalue()
        if includesvalue:
            logging.info('WARNING: Includes produce non-empty output')
        self.pp.out = None

    def finalizeJar(self, jarPath, chromebasepath, register, doZip=True):
        '''Helper method to write out the chrome registration entries to
         jarfile.manifest or chrome.manifest, or both.

        The actual file processing is done in updateManifest.
        '''

        # rewrite the manifest, if entries given
        if not register:
            return

        chromeManifest = os.path.join(os.path.dirname(jarPath), '..',
                'chrome.manifest')

        if self.useJarfileManifest:
            self.updateManifest(jarPath + '.manifest',
                                chromebasepath.format(''), register)
            addEntriesToListFile(chromeManifest,
                                 ['manifest chrome/{0}.manifest'.format(os.path.basename(jarPath))])
        if self.useChromeManifest:
            self.updateManifest(chromeManifest,
                                chromebasepath.format('chrome/'),
                                register)

        # If requested, add a root chrome manifest entry (assumed to be in the parent directory
        # of chromeManifest) with the application specific id. In cases where we're building
        # lang packs, the root manifest must know about application sub directories.

        if self.rootManifestAppId:
            rootChromeManifest = \
                os.path.join(os.path.normpath(os.path.dirname(chromeManifest)),
                             '..', 'chrome.manifest')
            rootChromeManifest = os.path.normpath(rootChromeManifest)
            chromeDir = \
                os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
            logging.info("adding '%s' entry to root chrome manifest appid=%s"
                          % (chromeDir, self.rootManifestAppId))
            addEntriesToListFile(rootChromeManifest,
                                 ['manifest %s/chrome.manifest application=%s'
                                  % (chromeDir,
                                 self.rootManifestAppId)])

    def updateManifest(self, manifestPath, chromebasepath, register):
        '''updateManifest replaces the % in the chrome registration entries
        with the given chrome base path, and updates the given manifest file.
        '''

        ensureParentDir(manifestPath)
        lock = lock_file(manifestPath + '.lck')
        try:
            myregister = dict.fromkeys(map(lambda s: s.replace('%',
                    chromebasepath), register.iterkeys()))
            manifestExists = os.path.isfile(manifestPath)
            mode = manifestExists and 'r+b' or 'wb'
            mf = open(manifestPath, mode)
            if manifestExists:
                # import previous content into hash, ignoring empty ones and comments
                imf = re.compile('(#.*)?$')
                for l in re.split('[\r\n]+', mf.read()):
                    if imf.match(l):
                        continue
                    myregister[l] = None
                mf.seek(0)
            for k in sorted(myregister.iterkeys()):
                mf.write(k + os.linesep)
            mf.close()
        finally:
            lock = None

    def makeJar(self, infile, jardir):
        '''makeJar is the main entry point to JarMaker.

        It takes the input file, the output directory, the source dirs and the
        top source dir as argument, and optionally the l10n dirs.
        '''

        # making paths absolute, guess srcdir if file and add to sourcedirs
        _normpath = lambda p: os.path.normpath(os.path.abspath(p))
        self.topsourcedir = _normpath(self.topsourcedir)
        self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
        if self.localedirs:
            self.localedirs = [_normpath(p) for p in self.localedirs]
        elif self.relativesrcdir:
            self.localedirs = \
                self.generateLocaleDirs(self.relativesrcdir)
        if isinstance(infile, basestring):
            logging.info('processing ' + infile)
            self.sourcedirs.append(_normpath(os.path.dirname(infile)))
        pp = self.pp.clone()
        pp.out = StringIO()
        pp.do_include(infile)
        lines = PushbackIter(pp.out.getvalue().splitlines())
        try:
            while True:
                l = lines.next()
                m = self.jarline.match(l)
                if not m:
                    raise RuntimeError(l)
                if m.group('jarfile') is None:
                    # comment
                    continue
                self.processJarSection(m.group('jarfile'), lines,
                        jardir)
        except StopIteration:
            # we read the file
            pass
        return

    def generateLocaleDirs(self, relativesrcdir):
        if os.path.basename(relativesrcdir) == 'locales':
            # strip locales
            l10nrelsrcdir = os.path.dirname(relativesrcdir)
        else:
            l10nrelsrcdir = relativesrcdir
        locdirs = []

        # generate locales dirs, merge, l10nbase, en-US
        if self.l10nmerge:
            locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir))
        if self.l10nbase:
            locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir))
        if self.l10nmerge or not self.l10nbase:
            # add en-US if we merge, or if it's not l10n
            locdirs.append(os.path.join(self.topsourcedir,
                           relativesrcdir, 'en-US'))
        return locdirs

    def processJarSection(self, jarfile, lines, jardir):
        '''Internal method called by makeJar to actually process a section
        of a jar.mn file.

        jarfile is the basename of the jarfile or the directory name for
        flat output, lines is a PushbackIter of the lines of jar.mn,
        the remaining options are carried over from makeJar.
        '''

        # chromebasepath is used for chrome registration manifests
        # {0} is getting replaced with chrome/ for chrome.manifest, and with
        # an empty string for jarfile.manifest

        chromebasepath = '{0}' + os.path.basename(jarfile)
        if self.outputFormat == 'jar':
            chromebasepath = 'jar:' + chromebasepath + '.jar!'
        chromebasepath += '/'

        jarfile = os.path.join(jardir, jarfile)
        jf = None
        if self.outputFormat == 'jar':
            # jar
            jarfilepath = jarfile + '.jar'
            try:
                os.makedirs(os.path.dirname(jarfilepath))
            except OSError, error:
                if error.errno != errno.EEXIST:
                    raise
            jf = ZipFile(jarfilepath, 'a', lock=True)
            outHelper = self.OutputHelper_jar(jf)
        else:
class TestPreprocessor(unittest.TestCase):
    """
    Unit tests for the Preprocessor class
    """

    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()

    def do_include_compare(self, content_lines, expected_lines):
        content = '%s' % '\n'.join(content_lines)
        expected = '%s'.rstrip() % '\n'.join(expected_lines)

        with MockedOpen({'dummy': content}):
            self.pp.do_include('dummy')
            self.assertEqual(self.pp.out.getvalue().rstrip('\n'), expected)

    def do_include_pass(self, content_lines):
        self.do_include_compare(content_lines, ['PASS'])

    def test_conditional_if_0(self):
        self.do_include_pass([
            '#if 0',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_no_marker(self):
        lines = [
            '#if 0',
            'PASS',
            '#endif',
        ]
        self.pp.setMarker(None)
        self.do_include_compare(lines, lines)

    def test_string_value(self):
        self.do_include_compare([
            '#define FOO STRING',
            '#if FOO',
            'string value is true',
            '#else',
            'string value is false',
            '#endif',
        ], ['string value is false'])

    def test_number_value(self):
        self.do_include_compare([
            '#define FOO 1',
            '#if FOO',
            'number value is true',
            '#else',
            'number value is false',
            '#endif',
        ], ['number value is true'])

    def test_conditional_if_0_elif_1(self):
        self.do_include_pass([
            '#if 0',
            '#elif 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_1(self):
        self.do_include_pass([
            '#if 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_0_or_1(self):
        self.do_include_pass([
            '#if 0 || 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_1_elif_1_else(self):
        self.do_include_pass([
            '#if 1',
            'PASS',
            '#elif 1',
            'FAIL',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_if_1_if_1(self):
        self.do_include_pass([
            '#if 1',
            '#if 1',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_not_0(self):
        self.do_include_pass([
            '#if !0',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_conditional_not_0_and_1(self):
        self.do_include_pass([
            '#if !0 && !1',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_conditional_not_1(self):
        self.do_include_pass([
            '#if !1',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_conditional_not_emptyval(self):
        self.do_include_compare([
            '#define EMPTYVAL',
            '#ifndef EMPTYVAL',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
            '#ifdef EMPTYVAL',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ], ['PASS', 'PASS'])

    def test_conditional_not_nullval(self):
        self.do_include_pass([
            '#define NULLVAL 0',
            '#if !NULLVAL',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_expand(self):
        self.do_include_pass([
            '#define ASVAR AS',
            '#expand P__ASVAR__S',
        ])

    def test_undef_defined(self):
        self.do_include_compare([
            '#define BAR',
            '#undef BAR',
            'BAR',
        ], ['BAR'])

    def test_undef_undefined(self):
        self.do_include_compare([
            '#undef BAR',
        ], [])

    def test_filter_attemptSubstitution(self):
        self.do_include_compare([
            '#filter attemptSubstitution',
            '@PASS@',
            '#unfilter attemptSubstitution',
        ], ['@PASS@'])

    def test_filter_emptyLines(self):
        self.do_include_compare([
            'lines with a',
            '',
            'blank line',
            '#filter emptyLines',
            'lines with',
            '',
            'no blank lines',
            '#unfilter emptyLines',
            'yet more lines with',
            '',
            'blank lines',
        ], [
            'lines with a',
            '',
            'blank line',
            'lines with',
            'no blank lines',
            'yet more lines with',
            '',
            'blank lines',
        ])

    def test_filter_slashslash(self):
        self.do_include_compare([
            '#filter slashslash',
            'PASS//FAIL  // FAIL',
            '#unfilter slashslash',
            'PASS // PASS',
        ], [
            'PASS',
            'PASS // PASS',
        ])

    def test_filter_spaces(self):
        self.do_include_compare([
            '#filter spaces',
            'You should see two nice ascii tables',
            ' +-+-+-+',
            ' | |   |     |',
            ' +-+-+-+',
            '#unfilter spaces',
            '+-+---+',
            '| |   |',
            '+-+---+',
        ], [
            'You should see two nice ascii tables',
            '+-+-+-+',
            '| | | |',
            '+-+-+-+',
            '+-+---+',
            '| |   |',
            '+-+---+',
        ])

    def test_filter_substitution(self):
        self.do_include_pass([
            '#define VAR ASS',
            '#filter substitution',
            'P@VAR@',
            '#unfilter substitution',
        ])

    def test_error(self):
        with MockedOpen({'f': '#error spit this message out\n'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
            self.assertEqual(e.exception.args[0][-1], 'spit this message out')

    def test_javascript_line(self):
        # The preprocessor is reading the filename from somewhere not caught
        # by MockedOpen.
        tmpdir = mkdtemp()
        try:
            full = os.path.join(tmpdir, 'javascript_line.js.in')
            with open(full, 'w') as fh:
                fh.write('\n'.join([
                    '// Line 1',
                    '#if 0',
                    '// line 3',
                    '#endif',
                    '// line 5',
                    '# comment',
                    '// line 7',
                    '// line 8',
                    '// line 9',
                    '# another comment',
                    '// line 11',
                    '#define LINE 1',
                    '// line 13, given line number overwritten with 2',
                    '',
                ]))

            self.pp.do_include(full)
            out = '\n'.join([
                '// Line 1',
                '//@line 5 "CWDjavascript_line.js.in"',
                '// line 5',
                '//@line 7 "CWDjavascript_line.js.in"',
                '// line 7',
                '// line 8',
                '// line 9',
                '//@line 11 "CWDjavascript_line.js.in"',
                '// line 11',
                '//@line 2 "CWDjavascript_line.js.in"',
                '// line 13, given line number overwritten with 2',
                '',
            ])
            out = out.replace('CWD', tmpdir + os.path.sep)
            self.assertEqual(self.pp.out.getvalue(), out)
        finally:
            shutil.rmtree(tmpdir)

    def test_literal(self):
        self.do_include_pass([
            '#literal PASS',
        ])

    def test_var_directory(self):
        self.do_include_pass([
            '#ifdef DIRECTORY',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_file(self):
        self.do_include_pass([
            '#ifdef FILE',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_if_0(self):
        self.do_include_pass([
            '#define VAR 0',
            '#if VAR',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_if_0_elifdef(self):
        self.do_include_pass([
            '#if 0',
            '#elifdef FILE',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_if_0_elifndef(self):
        self.do_include_pass([
            '#if 0',
            '#elifndef VAR',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_ifdef_0(self):
        self.do_include_pass([
            '#define VAR 0',
            '#ifdef VAR',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_ifdef_1_or_undef(self):
        self.do_include_pass([
            '#define FOO 1',
            '#if defined(FOO) || defined(BAR)',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_ifdef_undef(self):
        self.do_include_pass([
            '#define VAR 0',
            '#undef VAR',
            '#ifdef VAR',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_ifndef_0(self):
        self.do_include_pass([
            '#define VAR 0',
            '#ifndef VAR',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_ifndef_0_and_undef(self):
        self.do_include_pass([
            '#define FOO 0',
            '#if !defined(FOO) && !defined(BAR)',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_var_ifndef_undef(self):
        self.do_include_pass([
            '#define VAR 0',
            '#undef VAR',
            '#ifndef VAR',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_var_line(self):
        self.do_include_pass([
            '#ifdef LINE',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_filterDefine(self):
        self.do_include_pass([
            '#filter substitution',
            '#define VAR AS',
            '#define VAR2 P@VAR@',
            '@VAR2@S',
        ])

    def test_number_value_equals(self):
        self.do_include_pass([
            '#define FOO 1000',
            '#if FOO == 1000',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_default_defines(self):
        self.pp.handleCommandLine(["-DFOO"])
        self.do_include_pass([
            '#if FOO == 1',
            'PASS',
            '#else',
            'FAIL',
        ])

    def test_number_value_equals_defines(self):
        self.pp.handleCommandLine(["-DFOO=1000"])
        self.do_include_pass([
            '#if FOO == 1000',
            'PASS',
            '#else',
            'FAIL',
        ])

    def test_octal_value_equals(self):
        self.do_include_pass([
            '#define FOO 0100',
            '#if FOO == 0100',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_octal_value_equals_defines(self):
        self.pp.handleCommandLine(["-DFOO=0100"])
        self.do_include_pass([
            '#if FOO == 0100',
            'PASS',
            '#else',
            'FAIL',
            '#endif',
        ])

    def test_value_quoted_expansion(self):
        """
        Quoted values on the commandline don't currently have quotes stripped.
        Pike says this is for compat reasons.
        """
        self.pp.handleCommandLine(['-DFOO="ABCD"'])
        self.do_include_compare([
            '#filter substitution',
            '@FOO@',
        ], ['"ABCD"'])

    def test_octal_value_quoted_expansion(self):
        self.pp.handleCommandLine(['-DFOO="0100"'])
        self.do_include_compare([
            '#filter substitution',
            '@FOO@',
        ], ['"0100"'])

    def test_number_value_not_equals_quoted_defines(self):
        self.pp.handleCommandLine(['-DFOO="1000"'])
        self.do_include_pass([
            '#if FOO == 1000',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_octal_value_not_equals_quoted_defines(self):
        self.pp.handleCommandLine(['-DFOO="0100"'])
        self.do_include_pass([
            '#if FOO == 0100',
            'FAIL',
            '#else',
            'PASS',
            '#endif',
        ])

    def test_undefined_variable(self):
        with MockedOpen({'f': '#filter substitution\n@foo@'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
            self.assertEqual(e.exception.key, 'UNDEFINED_VAR')

    def test_include(self):
        files = {
            'foo/test': '\n'.join([
                '#define foo foobarbaz',
                '#include @inc@',
                '@bar@',
                '',
            ]),
            'bar': '\n'.join([
                '#define bar barfoobaz',
                '@foo@',
                '',
            ]),
            'f': '\n'.join([
                '#filter substitution',
                '#define inc ../bar',
                '#include foo/test',
                '',
            ]),
        }

        with MockedOpen(files):
            self.pp.do_include('f')
            self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\nbarfoobaz\n')

    def test_include_line(self):
        files = {
            'test.js': '\n'.join([
                '#define foo foobarbaz',
                '#include @inc@',
                '@bar@',
                '',
            ]),
            'bar.js': '\n'.join([
                '#define bar barfoobaz',
                '@foo@',
                '',
            ]),
            'foo.js': '\n'.join([
                'bazfoobar',
                '#include bar.js',
                'bazbarfoo',
                '',
            ]),
            'baz.js': 'baz\n',
            'f.js': '\n'.join([
                '#include foo.js',
                '#filter substitution',
                '#define inc bar.js',
                '#include test.js',
                '#include baz.js',
                'fin',
                '',
            ]),
        }

        with MockedOpen(files):
            self.pp.do_include('f.js')
            self.assertEqual(self.pp.out.getvalue(),
                             ('//@line 1 "CWD/foo.js"\n'
                              'bazfoobar\n'
                              '//@line 2 "CWD/bar.js"\n'
                              '@foo@\n'
                              '//@line 3 "CWD/foo.js"\n'
                              'bazbarfoo\n'
                              '//@line 2 "CWD/bar.js"\n'
                              'foobarbaz\n'
                              '//@line 3 "CWD/test.js"\n'
                              'barfoobaz\n'
                              '//@line 1 "CWD/baz.js"\n'
                              'baz\n'
                              '//@line 6 "CWD/f.js"\n'
                              'fin\n').replace('CWD/',
                                               os.getcwd() + os.path.sep))

    def test_include_missing_file(self):
        with MockedOpen({'f': '#include foo\n'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
            self.assertEqual(e.exception.key, 'FILE_NOT_FOUND')

    def test_include_undefined_variable(self):
        with MockedOpen({'f': '#filter substitution\n#include @foo@\n'}):
            with self.assertRaises(Preprocessor.Error) as e:
                self.pp.do_include('f')
            self.assertEqual(e.exception.key, 'UNDEFINED_VAR')

    def test_include_literal_at(self):
        files = {
            '@foo@': '#define foo foobarbaz\n',
            'f': '#include @foo@\n#filter substitution\n@foo@\n',
        }

        with MockedOpen(files):
            self.pp.do_include('f')
            self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')

    def test_command_line_literal_at(self):
        with MockedOpen({"@[email protected]": '@foo@\n'}):
            self.pp.handleCommandLine(['-Fsubstitution', '-Dfoo=foobarbaz', '@[email protected]'])
            self.assertEqual(self.pp.out.getvalue(), 'foobarbaz\n')
Example #48
    def _consume_jar_manifest(self, obj):
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - L10n support
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        if obj.defines:
            pp.context.update(obj.defines.defines)
        pp.context.update(self.environment.defines)
        pp.context.update(
            AB_CD='en-US',
            BUILD_FASTER=1,
        )
        pp.out = JarManifestParser()
        try:
            pp.do_include(obj.path.full_path)
        except DeprecatedJarManifest as e:
            raise DeprecatedJarManifest('Parsing error while processing %s: %s'
                                        % (obj.path.full_path, e.message))
        self.backend_input_files |= pp.includes

        for jarinfo in pp.out:
            jar_context = Context(
                allowed_variables=VARIABLES, config=obj._context.config)
            jar_context.push_source(obj._context.main_path)
            jar_context.push_source(obj.path.full_path)

            install_target = obj.install_target
            if jarinfo.base:
                install_target = mozpath.normpath(
                    mozpath.join(install_target, jarinfo.base))
            jar_context['FINAL_TARGET'] = install_target
            if obj.defines:
                jar_context['DEFINES'] = obj.defines.defines
            files = jar_context['FINAL_TARGET_FILES']
            files_pp = jar_context['FINAL_TARGET_PP_FILES']

            for e in jarinfo.entries:
                if e.is_locale:
                    if jarinfo.relativesrcdir:
                        src = '/%s' % jarinfo.relativesrcdir
                    else:
                        src = ''
                    src = mozpath.join(src, 'en-US', e.source)
                else:
                    src = e.source

                src = Path(jar_context, src)

                if '*' not in e.source and not os.path.exists(src.full_path):
                    if e.is_locale:
                        raise Exception(
                            '%s: Cannot find %s' % (obj.path, e.source))
                    if e.source.startswith('/'):
                        src = Path(jar_context, '!' + e.source)
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = Path(obj._context, '!' + e.source)

                output_basename = mozpath.basename(e.output)
                if output_basename != src.target_basename:
                    src = RenamedSourcePath(jar_context,
                                            (src, output_basename))
                path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))

                if e.preprocess:
                    if '*' in e.source:
                        raise Exception('%s: Wildcards are not supported with '
                                        'preprocessing' % obj.path)
                    files_pp[path] += [src]
                else:
                    files[path] += [src]

            if files:
                self.consume_object(FinalTargetFiles(jar_context, files))
            if files_pp:
                self.consume_object(
                    FinalTargetPreprocessedFiles(jar_context, files_pp))

            for m in jarinfo.chrome_manifests:
                entry = parse_manifest_line(
                    mozpath.dirname(jarinfo.name),
                    m.replace('%', mozpath.basename(jarinfo.name) + '/'))
                self.consume_object(ChromeManifestEntry(
                    jar_context, '%s.manifest' % jarinfo.name, entry))
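
For orientation, an illustrative jar.mn fragment of the shape this method consumes, held in a Python string (all names are made up): the '%' line turns into a chrome manifest entry, the plain entry lands in FINAL_TARGET_FILES, and the '*' (preprocessed) entry lands in FINAL_TARGET_PP_FILES.

JAR_MN = '\n'.join([
    'browser.jar:',
    '%   content browser %content/browser/',
    '    content/browser/aboutDialog.xhtml     (content/aboutDialog.xhtml)',
    '*   content/browser/aboutDialog.js        (content/aboutDialog.js)',
])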
Example #49
def main(args):
    pp = Preprocessor()
    pp.handleCommandLine(args, True)
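
A hedged usage sketch for this entry point, using only the -D and -F flags exercised by the tests elsewhere in this listing (the file name is illustrative; with no output option given, passing True is expected to default the output to stdout):

main(['-DVERSION=1.2.3', '-Fsubstitution', 'prefs.js.in'])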
Example #50
class JarMaker(object):
    '''JarMaker reads jar.mn files and processes them into jar files or
      flat directories, along with chrome.manifest files.
      '''

    def __init__(self, outputFormat='flat', useJarfileManifest=True,
        useChromeManifest=False):

        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        self.localedirs = None
        self.l10nbase = None
        self.l10nmerge = None
        self.relativesrcdir = None
        self.rootManifestAppId = None
        self._seen_output = set()

    def getCommandLineParser(self):
        '''Get an optparse.OptionParser for jarmaker.

        This OptionParser has the options for jarmaker as well as
        the options for the inner PreProcessor.
        '''

        # HACK, we need to unescape the string variables we get,
        # the perl versions didn't grok strings right

        p = self.pp.getCommandLineParser(unescapeDefines=True)
        p.add_option('-f', type='choice', default='jar',
            choices=('jar', 'flat', 'symlink'),
            help='fileformat used for output',
            metavar='[jar, flat, symlink]',
            )
        p.add_option('-v', action='store_true', dest='verbose',
                     help='verbose output')
        p.add_option('-q', action='store_false', dest='verbose',
                     help='verbose output')
        p.add_option('-e', action='store_true',
                     help='create chrome.manifest instead of jarfile.manifest'
                     )
        p.add_option('-s', type='string', action='append', default=[],
                     help='source directory')
        p.add_option('-t', type='string', help='top source directory')
        p.add_option('-c', '--l10n-src', type='string', action='append'
                     , help='localization directory')
        p.add_option('--l10n-base', type='string', action='store',
                     help='base directory to be used for localization (requires relativesrcdir)'
                     )
        p.add_option('--locale-mergedir', type='string', action='store'
                     ,
                     help='base directory to be used for l10n-merge (requires l10n-base and relativesrcdir)'
                     )
        p.add_option('--relativesrcdir', type='string',
                     help='relativesrcdir to be used for localization')
        p.add_option('-d', type='string', help='base directory')
        p.add_option('--root-manifest-entry-appid', type='string',
                     help='add an app id specific root chrome manifest entry.'
                     )
        return p

    def finalizeJar(self, jardir, jarbase, jarname, chromebasepath, register, doZip=True):
        '''Helper method to write out the chrome registration entries to
         jarfile.manifest or chrome.manifest, or both.

        The actual file processing is done in updateManifest.
        '''

        # rewrite the manifest, if entries given
        if not register:
            return

        chromeManifest = os.path.join(jardir, jarbase, 'chrome.manifest')

        if self.useJarfileManifest:
            self.updateManifest(os.path.join(jardir, jarbase,
                                             jarname + '.manifest'),
                                chromebasepath.format(''), register)
            if jarname != 'chrome':
                addEntriesToListFile(chromeManifest,
                                     ['manifest {0}.manifest'.format(jarname)])
        if self.useChromeManifest:
            chromebase = os.path.dirname(jarname) + '/'
            self.updateManifest(chromeManifest,
                                chromebasepath.format(chromebase), register)

        # If requested, add a root chrome manifest entry (assumed to be in the parent directory
        # of chromeManifest) with the application specific id. In cases where we're building
        # lang packs, the root manifest must know about application sub directories.

        if self.rootManifestAppId:
            rootChromeManifest = \
                os.path.join(os.path.normpath(os.path.dirname(chromeManifest)),
                             '..', 'chrome.manifest')
            rootChromeManifest = os.path.normpath(rootChromeManifest)
            chromeDir = \
                os.path.basename(os.path.dirname(os.path.normpath(chromeManifest)))
            logging.info("adding '%s' entry to root chrome manifest appid=%s"
                          % (chromeDir, self.rootManifestAppId))
            addEntriesToListFile(rootChromeManifest,
                                 ['manifest %s/chrome.manifest application=%s'
                                  % (chromeDir,
                                 self.rootManifestAppId)])

    def updateManifest(self, manifestPath, chromebasepath, register):
        '''updateManifest replaces the % in the chrome registration entries
        with the given chrome base path, and updates the given manifest file.
        '''
        myregister = dict.fromkeys(map(lambda s: s.replace('%',
            chromebasepath), register))
        addEntriesToListFile(manifestPath, myregister.iterkeys())

    def makeJar(self, infile, jardir):
        '''makeJar is the main entry point to JarMaker.

        It takes the input file, the output directory, the source dirs and the
        top source dir as argument, and optionally the l10n dirs.
        '''

        # making paths absolute, guess srcdir if file and add to sourcedirs
        _normpath = lambda p: os.path.normpath(os.path.abspath(p))
        self.topsourcedir = _normpath(self.topsourcedir)
        self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
        if self.localedirs:
            self.localedirs = [_normpath(p) for p in self.localedirs]
        elif self.relativesrcdir:
            self.localedirs = \
                self.generateLocaleDirs(self.relativesrcdir)
        if isinstance(infile, basestring):
            logging.info('processing ' + infile)
            self.sourcedirs.append(_normpath(os.path.dirname(infile)))
        pp = self.pp.clone()
        pp.out = JarManifestParser()
        pp.do_include(infile)

        for info in pp.out:
            self.processJarSection(info, jardir)

    def generateLocaleDirs(self, relativesrcdir):
        if os.path.basename(relativesrcdir) == 'locales':
            # strip locales
            l10nrelsrcdir = os.path.dirname(relativesrcdir)
        else:
            l10nrelsrcdir = relativesrcdir
        locdirs = []

        # generate locales dirs, merge, l10nbase, en-US
        if self.l10nmerge:
            locdirs.append(os.path.join(self.l10nmerge, l10nrelsrcdir))
        if self.l10nbase:
            locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir))
        if self.l10nmerge or not self.l10nbase:
            # add en-US if we merge, or if it's not l10n
            locdirs.append(os.path.join(self.topsourcedir,
                           relativesrcdir, 'en-US'))
        return locdirs

    def processJarSection(self, jarinfo, jardir):
        '''Internal method called by makeJar to actually process a section
        of a jar.mn file.
        '''

        # chromebasepath is used for chrome registration manifests
        # {0} is getting replaced with chrome/ for chrome.manifest, and with
        # an empty string for jarfile.manifest

        chromebasepath = '{0}' + os.path.basename(jarinfo.name)
        if self.outputFormat == 'jar':
            chromebasepath = 'jar:' + chromebasepath + '.jar!'
        chromebasepath += '/'

        jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name)
        jf = None
        if self.outputFormat == 'jar':
            # jar
            jarfilepath = jarfile + '.jar'
            try:
                os.makedirs(os.path.dirname(jarfilepath))
            except OSError, error:
                if error.errno != errno.EEXIST:
                    raise
            jf = ZipFile(jarfilepath, 'a', lock=True)
            outHelper = self.OutputHelper_jar(jf)
        else:
 def __init__(self, *args, **kwargs):
     Preprocessor.__init__(self, *args, **kwargs)
     self.do_filter('c_substitution')
     self.setMarker('#\s*')
Example #52
    def _consume_jar_manifest(self, obj):
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - L10n support
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        if obj.defines:
            pp.context.update(obj.defines.defines)
        pp.context.update(self.environment.defines)
        pp.context.update(
            AB_CD='en-US',
            BUILD_FASTER=1,
        )
        pp.out = JarManifestParser()
        try:
            pp.do_include(obj.path.full_path)
        except DeprecatedJarManifest as e:
            raise DeprecatedJarManifest(
                'Parsing error while processing %s: %s' %
                (obj.path.full_path, e.message))
        self.backend_input_files |= pp.includes

        for jarinfo in pp.out:
            jar_context = Context(allowed_variables=VARIABLES,
                                  config=obj._context.config)
            jar_context.push_source(obj._context.main_path)
            jar_context.push_source(obj.path.full_path)

            install_target = obj.install_target
            if jarinfo.base:
                install_target = mozpath.normpath(
                    mozpath.join(install_target, jarinfo.base))
            jar_context['FINAL_TARGET'] = install_target
            if obj.defines:
                jar_context['DEFINES'] = obj.defines.defines
            files = jar_context['FINAL_TARGET_FILES']
            files_pp = jar_context['FINAL_TARGET_PP_FILES']

            for e in jarinfo.entries:
                if e.is_locale:
                    if jarinfo.relativesrcdir:
                        src = '/%s' % jarinfo.relativesrcdir
                    else:
                        src = ''
                    src = mozpath.join(src, 'en-US', e.source)
                else:
                    src = e.source

                src = Path(jar_context, src)

                if '*' not in e.source and not os.path.exists(src.full_path):
                    if e.is_locale:
                        raise Exception('%s: Cannot find %s' %
                                        (obj.path, e.source))
                    if e.source.startswith('/'):
                        src = Path(jar_context, '!' + e.source)
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = Path(obj._context, '!' + e.source)

                output_basename = mozpath.basename(e.output)
                if output_basename != src.target_basename:
                    src = RenamedSourcePath(jar_context,
                                            (src, output_basename))
                path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))

                if e.preprocess:
                    if '*' in e.source:
                        raise Exception('%s: Wildcards are not supported with '
                                        'preprocessing' % obj.path)
                    files_pp[path] += [src]
                else:
                    files[path] += [src]

            if files:
                self.consume_object(FinalTargetFiles(jar_context, files))
            if files_pp:
                self.consume_object(
                    FinalTargetPreprocessedFiles(jar_context, files_pp))

            for m in jarinfo.chrome_manifests:
                entry = parse_manifest_line(
                    mozpath.dirname(jarinfo.name),
                    m.replace('%',
                              mozpath.basename(jarinfo.name) + '/'))
                self.consume_object(
                    ChromeManifestEntry(jar_context,
                                        '%s.manifest' % jarinfo.name, entry))
class TestPreprocessor(unittest.TestCase):
  """
  Unit tests for the Preprocessor class
  """

  def setUp(self):
    self.pp = Preprocessor()
    self.pp.out = StringIO()

  def test_conditional_if_0(self):
    f = NamedIO("conditional_if_0.in", """#if 0
FAIL
#else
PASS
#endif
""")
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")

  def test_no_marker(self):
    no_marker = """#if 0
PASS
#endif
"""
    f = NamedIO("no_marker.in", no_marker)
    self.pp.setMarker(None)
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), no_marker)

  def test_string_value(self):
    f = NamedIO("string_value.in", """#define FOO STRING
#if FOO
string value is true
#else
string value is false
#endif
""")
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "string value is false\n")
  
  def test_number_value(self):
    f = NamedIO("string_value.in", """#define FOO 1
#if FOO
number value is true
#else
number value is false
#endif
""")
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "number value is true\n")
  
  def test_conditional_if_0_elif_1(self):
    f = NamedIO('conditional_if_0_elif_1.in', '''#if 0
#elif 1
PASS
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_conditional_if_1(self):
    f = NamedIO('conditional_if_1.in', '''#if 1
PASS
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_conditional_if_1_elif_1_else(self):
    f = NamedIO('conditional_if_1_elif_1_else.in', '''#if 1
PASS
#elif 1
FAIL
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_conditional_if_1_if_1(self):
    f = NamedIO('conditional_if_1_if_1.in', '''#if 1
#if 1
PASS
#else
FAIL
#endif
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_conditional_not_0(self):
    f = NamedIO('conditional_not_0.in', '''#if !0
PASS
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_conditional_not_1(self):
    f = NamedIO('conditional_not_1.in', '''#if !1
FAIL
#else
PASS
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_conditional_not_emptyval(self):
    f = NamedIO('conditional_not_emptyval.in', '''#define EMPTYVAL
#if !EMPTYVAL
FAIL
#else
PASS
#endif
#if EMPTYVAL
PASS
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\nPASS\n")
  
  def test_conditional_not_nullval(self):
    f = NamedIO('conditional_not_nullval.in', '''#define NULLVAL 0
#if !NULLVAL
PASS
#else
FAIL
#endif
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")
  
  def test_expand(self):
    f = NamedIO('expand.in', '''#define ASVAR AS
#expand P__ASVAR__S
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")

  def test_undef_defined(self):
    f = NamedIO('undef_defined.in', '''#define BAR
#undef BAR
BAR
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "BAR\n")

  def test_undef_undefined(self):
    f = NamedIO('undef_undefined.in', '''#undef VAR
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "")
  
  def test_filter_attemptSubstitution(self):
    f = NamedIO('filter_attemptSubstitution.in', '''#filter attemptSubstitution
@PASS@
#unfilter attemptSubstitution
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "@PASS@\n")
  
  def test_filter_emptyLines(self):
    f = NamedIO('filter_emptyLines.in', '''lines with a

blank line
#filter emptyLines
lines with

no blank lines
#unfilter emptyLines
yet more lines with

blank lines
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), '''lines with a

blank line
lines with
no blank lines
yet more lines with

blank lines
''')
  
  def test_filter_slashslash(self):
    f = NamedIO('filter_slashslash.in', '''#filter slashslash
PASS//FAIL  // FAIL
#unfilter slashslash
PASS // PASS
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\nPASS // PASS\n")
  
  def test_filter_spaces(self):
    f = NamedIO('filter_spaces.in', '''#filter spaces
You should see two nice ascii tables
 +-+-+-+
 | |   |     |
 +-+-+-+
#unfilter spaces
+-+---+
| |   |
+-+---+ 
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), """You should see two nice ascii tables
+-+-+-+
| | | |
+-+-+-+
+-+---+
| |   |
+-+---+ 
""")
  
  def test_filter_substitution(self):
    f = NamedIO('filter_substitution.in', '''#define VAR ASS
#filter substitution
P@VAR@
#unfilter substitution
''')
    self.pp.do_include(f)
    self.assertEqual(self.pp.out.getvalue(), "PASS\n")

  def test_error(self):
    f = NamedIO('error.in', '''#error spit this message out
''')
    caught_msg = None
    try:
      self.pp.do_include(f)
    except Preprocessor.Error as e:
      caught_msg = e.args[0][-1]
    self.assertEqual(caught_msg, 'spit this message out')
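
Note: the tests above rely on a NamedIO helper that is not included in this excerpt. The preprocessor's do_include() expects a file-like object carrying a name attribute, which a plain StringIO lacks. A minimal sketch of such a helper, written here as an assumption based on how it is used above:

from io import StringIO

class NamedIO(StringIO):
    """In-memory file with a .name attribute, as do_include() expects.

    This is a sketch; the helper actually used by these tests may differ.
    """
    def __init__(self, name, content):
        StringIO.__init__(self, content)
        self.name = name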
Example #54
 def __init__(self, *args, **kwargs):
     Preprocessor.__init__(self, *args, **kwargs)
     self.do_filter("c_substitution")
     self.setMarker("#\s*")
 def setUp(self):
     self.pp = Preprocessor()
     self.pp.out = StringIO()
         TINT_FOREGROUND_DRAWABLE=1),
    dict(VIEW_NAME_SUFFIX='LinearLayout',
         BASE_TYPE='android.widget.LinearLayout'),
    dict(VIEW_NAME_SUFFIX='RelativeLayout',
         BASE_TYPE='android.widget.RelativeLayout',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='TextSwitcher',
         BASE_TYPE='android.widget.TextSwitcher'),
    dict(VIEW_NAME_SUFFIX='TextView',
         BASE_TYPE='android.widget.TextView',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='View',
         BASE_TYPE='android.view.View',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='ListView',
         BASE_TYPE='android.widget.ListView',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='ProgressBar',
         BASE_TYPE='android.widget.ProgressBar',
         STYLE_CONSTRUCTOR=1),
]

for view in views:
    pp = Preprocessor(defines=view, marker='//#')

    dest = os.path.join(__DIR__, dest_format_string % view)
    with open(template, 'r') as input:
        with open(dest, 'wt') as output:
            pp.processFile(input=input, output=output)
            print(dest)
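
For context on marker='//#' in the loop above: with that marker, preprocessor directives are written as //#ifdef, //#filter, and so on, so a Java template stays syntactically valid until it is processed. The following is a minimal, self-contained sketch; the template text, file names, and defines are invented for illustration, and only the Preprocessor calls mirror the loop above.

import tempfile

from mozbuild.preprocessor import Preprocessor

# Hypothetical template: '//#' directives plus @VAR@ substitution enabled
# by //#filter substitution.
TEMPLATE = """//#filter substitution
public class Fake@VIEW_NAME_SUFFIX@ extends @BASE_TYPE@ {
//#ifdef STYLE_CONSTRUCTOR
    // A style constructor would be generated here.
//#endif
}
//#unfilter substitution
"""

# Write the template to a temporary file so the preprocessor can read it.
with tempfile.NamedTemporaryFile('w', suffix='.tmpl', delete=False) as tmpl:
    tmpl.write(TEMPLATE)

pp = Preprocessor(defines=dict(VIEW_NAME_SUFFIX='TextView',
                               BASE_TYPE='android.widget.TextView',
                               STYLE_CONSTRUCTOR=1),
                  marker='//#')
with open(tmpl.name) as input, open('FakeTextView.java', 'w') as output:
    pp.processFile(input=input, output=output)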
Example #57
def parse_defines(paths):
    pp = Preprocessor()
    for path in paths:
        pp.do_include(path)

    return pp.context
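
A short usage sketch for parse_defines(): each path is run through a single Preprocessor instance, so every #define encountered accumulates in pp.context, and the returned mapping behaves like a dict. The file name and defines below are hypothetical:

# Hypothetical input file, defs.in:
#   #define MOZ_APP_NAME firefox
#   #define MOZ_WIDGET_TOOLKIT android

defines = parse_defines(['defs.in'])
print(defines['MOZ_APP_NAME'])          # prints: firefox
print('MOZ_WIDGET_TOOLKIT' in defines)  # prints: True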