Example #1
def main(output, input_file, version):
    pp = Preprocessor()
    pp.context.update({
        'VERSION': version,
    })
    pp.out = output
    pp.do_include(input_file)
Example #2
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    substs = buildconfig.substs

    # Substs taken verbatim.
    substs_vars = ('BIN_SUFFIX', )
    for var in substs_vars:
        pp.context[var] = '"%s"' % substs[var]

    # Derived values.
    for key, condition in (('IS_MAC', substs['OS_ARCH'] == 'Darwin'),
                           ('IS_LINUX', substs['OS_ARCH'] == 'Linux'),
                           ('IS_TEST_BUILD',
                            substs.get('ENABLE_TESTS') == '1'),
                           ('IS_DEBUG_BUILD', substs.get('MOZ_DEBUG') == '1'),
                           ('CRASHREPORTER', substs.get('MOZ_CRASHREPORTER')),
                           ('IS_ASAN', substs.get('MOZ_ASAN'))):
        if condition:
            pp.context[key] = '1'
        else:
            pp.context[key] = '0'

    pp.context.update({
        'XPC_BIN_PATH':
        '"%s/dist/bin"' % buildconfig.topobjdir,
        'CERTS_SRC_DIR':
        '"%s/build/pgo/certs"' % buildconfig.topsrcdir,
    })

    pp.out = output
    pp.do_include(input_file)
Example #3
def main(args):
    pp = Preprocessor()
    optparser = pp.getCommandLineParser()
    optparser.add_option('--nss-file', action='append',
                         type='string', dest='nss_files', default=[],
                         help='Specify a .def file that should have NSS\'s processing rules applied to it')
    options, deffiles = optparser.parse_args(args)

    symbols = set()
    for f in options.nss_files:
        symbols |= extract_symbols(nss_preprocess_file(f))
    for f in deffiles:
        # Start each deffile off with a clean slate.
        defpp = pp.clone()
        symbols |= extract_symbols(preprocess_file(defpp, f))

    script = """{
global:
  %s
local:
  *;
};
"""
    with FileAvoidWrite(options.output) as f:
        f.write(script % '\n  '.join("%s;" % s for s in sorted(symbols)))
Example #4
def main():
    parser = argparse.ArgumentParser(description='Find duplicate files in directory.')
    parser.add_argument('--warning', '-w', action='store_true',
                        help='Only warn about duplicates, do not exit with an error')
    parser.add_argument('--file', '-f', action='append', dest='dupes_files', default=[],
                        help='Add exceptions to the duplicate list from this file')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    parser.add_argument('directory',
                        help='The directory to check for duplicates in')

    args = parser.parse_args()

    allowed_dupes = []
    for filename in args.dupes_files:
        pp = Preprocessor()
        pp.context.update(buildconfig.defines['ALLDEFINES'])
        if args.D:
            pp.context.update(args.D)
        for undefine in args.U:
            if undefine in pp.context:
                del pp.context[undefine]
        pp.out = StringIO()
        pp.do_filter('substitution')
        pp.do_include(filename)
        allowed_dupes.extend([line.partition('#')[0].rstrip()
                              for line in pp.out.getvalue().splitlines()])

    find_dupes(args.directory, bail=not args.warning, allowed_dupes=allowed_dupes)
Example #5
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'VERSION':
        'xul%s' % buildconfig.substs['MOZILLA_SYMBOLVERSION'],
    })
    pp.out = output
    pp.do_include(input_file)
Example #6
def preprocess(path, defines):
    pp = Preprocessor(defines=defines, marker='%')
    pp.context.update(defines)
    pp.out = io.StringIO()
    pp.do_filter('substitution')
    pp.do_include(io.open(path, 'r', encoding='latin1'))
    pp.out.seek(0)
    return pp.out
Example #7
def preprocess(path, defines):
    pp = Preprocessor(defines=defines, marker="%")
    pp.context.update(defines)
    pp.out = io.StringIO()
    pp.do_filter("substitution")
    pp.do_include(io.open(path, "r", encoding="latin1"))
    pp.out.seek(0)
    return pp.out
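Examples #6 and #7 are the same helper (they differ only in quoting style): they read a template whose directives use the '%' marker and whose @VAR@ tokens are expanded by the 'substitution' filter. Below is a self-contained sketch of that behaviour; the template text, file name, and define values are invented, and it only assumes mozbuild is importable (e.g. inside a mach build virtualenv).

import io
import tempfile

from mozbuild.preprocessor import Preprocessor

# Invented template: '%ifdef' is a directive because the marker is '%',
# and @FEATURE_NAME@ is expanded by the 'substitution' filter.
template = (
    "%ifdef ENABLE_FEATURE\n"
    "feature=@FEATURE_NAME@\n"
    "%endif\n"
)

with tempfile.NamedTemporaryFile('w', suffix='.in', delete=False) as tmp:
    tmp.write(template)
    path = tmp.name

defines = {'ENABLE_FEATURE': '1', 'FEATURE_NAME': 'telemetry'}
pp = Preprocessor(defines=defines, marker='%')
pp.context.update(defines)
pp.out = io.StringIO()
pp.do_filter('substitution')
pp.do_include(io.open(path, 'r', encoding='latin1'))
pp.out.seek(0)
print(pp.out.read())  # expected: "feature=telemetry\n"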
Example #8
    def copy(self, dest, skip_if_older=True):
        '''
        Invokes the preprocessor to create the destination file.
        '''
        if isinstance(dest, six.string_types):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        # See comment in AbsoluteSymlinkFile about Windows.
        if hasattr(os, 'symlink') and platform.system() != 'Windows':
            if os.path.islink(dest.path):
                os.remove(dest.path)

        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with _open(self.depfile, 'rt') as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with _open(self.path, 'rU') as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
Example #9
def preprocess(input, parser, defines={}):
    '''
    Preprocess the file-like input with the given defines, and send the
    preprocessed output line by line to the given parser.
    '''
    pp = Preprocessor()
    pp.context.update(defines)
    pp.do_filter('substitution')
    pp.out = PreprocessorOutputWrapper(pp, parser)
    pp.do_include(input)
Example #10
    def inputs(self):
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with open(self.path, 'rU') as input:
            with open(os.devnull, 'w') as output:
                pp.processFile(input=input, output=output)

        # This always yields at least self.path.
        return pp.includes
Example #11
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines["ALLDEFINES"])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get("MOZ_DEBUG"):
        pp.context["DEBUG"] = "1"

    if buildconfig.substs.get("CPU_ARCH") == "aarch64":
        pp.context["MOZ_AARCH64"] = True

    pp.out = StringIO()
    pp.do_filter("substitution")
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        input_file = os.path.relpath(
            pref_list_filename,
            os.environ.get("GECKO_PATH", os.environ.get("TOPSRCDIR")),
        )
        code = generate_code(pref_list, input_file)
    except (IOError, ValueError) as e:
        print("{}: error:\n  {}\n".format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code["static_pref_list_all_h"])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite("StaticPrefsAll.h") as fd:
        fd.write(code["static_prefs_all_h"])

    for group, text in sorted(code["static_pref_list_group_h"].items()):
        filename = "StaticPrefList_{}.h".format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code["static_prefs_group_h"].items()):
        filename = "StaticPrefs_{}.h".format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname,
                                     "StaticPrefsCGetters.cpp")) as fd:
        fd.write(code["static_prefs_c_getters_cpp"])

    with FileAvoidWrite("static_prefs.rs") as fd:
        fd.write(code["static_prefs_rs"])
Example #12
def main(output, input_file):
    pp = Preprocessor()
    pp.context.update({
        'FFI_EXEC_TRAMPOLINE_TABLE': '0',
        'HAVE_LONG_DOUBLE': '0',
        'TARGET': buildconfig.substs['FFI_TARGET'],
        'VERSION': '',
    })
    pp.do_filter('substitution')
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
Example #13
def main(output, input_file, *defines):
    pp = Preprocessor()
    pp.context.update({
        "FFI_EXEC_TRAMPOLINE_TABLE": "0",
        "HAVE_LONG_DOUBLE": "0",
        "TARGET": buildconfig.substs["FFI_TARGET"],
        "VERSION": "",
    })
    for d in defines:
        pp.context.update({d: "1"})
    pp.do_filter("substitution")
    pp.setMarker(None)
    pp.out = output
    pp.do_include(input_file)
Example #14
    def __init__(self, outputFormat='flat', useJarfileManifest=True,
                 useChromeManifest=False):

        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        self.localedirs = None
        self.l10nbase = None
        self.l10nmerge = None
        self.relativesrcdir = None
        self.rootManifestAppId = None
Example #15
def process_package_overload(src, dst, version, app_buildid):
    ensureParentDir(dst)
    # First replace numeric version like '1.3'
    # Then replace with 'slashed' version like '1_4'
    # Finally set the full length addon version like 1.3.20131230
    defines = {
        "NUM_VERSION": version,
        "SLASH_VERSION": version.replace(".", "_"),
        "FULL_VERSION": ("%s.%s" % (version, app_buildid))
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
Example #16
def rnp_preprocess(tmpl, dest, defines):
    """
    Generic preprocessing
    :param BinaryIO tmpl: open filehandle (read) input
    :param BinaryIO dest: open filehandle (write) output
    :param dict defines: result of get_defines()
    :return boolean:
    """
    pp = Preprocessor()
    pp.setMarker("%")
    pp.addDefines(defines)
    pp.do_filter("substitution")
    pp.out = dest
    pp.do_include(tmpl, True)
    return True
Example #17
def emit_code(fd, pref_list_filename):
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'

    pp.out = BytesIO()
    pp.do_filter('substitution')
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        code = generate_code(pref_list)
    except (IOError, ValueError) as e:
        print('{}: error:\n  {}\n'.format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code['static_pref_list_all_h'])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    with FileAvoidWrite('StaticPrefsAll.h') as fd:
        fd.write(code['static_prefs_all_h'])

    for group, text in sorted(code['static_pref_list_group_h'].items()):
        filename = 'StaticPrefList_{}.h'.format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code['static_prefs_group_h'].items()):
        filename = 'StaticPrefs_{}.h'.format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname,
                                     'StaticPrefsCGetters.cpp')) as fd:
        fd.write(code['static_prefs_c_getters_cpp'])

    with FileAvoidWrite('static_prefs.rs') as fd:
        fd.write(code['static_prefs_rs'])
Example #18
def preprocess_file(src, dst, version, app_buildid, update_url):
    ensureParentDir(dst)

    defines = {
        "ADDON_ID": "fxos_" + version.replace(".", "_") + "*****@*****.**",
        # (reduce the app build id to only the build date
        # as addon manager doesn't handle big ints in addon versions)
        "ADDON_VERSION": ("%s.%s" % (version, app_buildid[:8])),
        "ADDON_NAME": "Firefox OS " + version + " Simulator",
        "ADDON_DESCRIPTION": "a Firefox OS " + version + " simulator",
        "ADDON_UPDATE_URL": update_url
    }
    pp = Preprocessor(defines=defines)
    pp.do_filter("substitution")
    with open(dst, "w") as output:
        with open(src, "r") as input:
            pp.processFile(input=input, output=output)
Example #19
    def _get_preprocessor(self, output, extra):
        '''Returns a preprocessor for use by create_config_file and
        create_makefile.
        '''
        path = output.name
        pp = Preprocessor()
        pp.context.update(self.substs)
        pp.context.update(top_srcdir=self.get_top_srcdir(path))
        pp.context.update(srcdir=self.get_file_srcdir(path))
        pp.context.update(relativesrcdir=self.get_relative_srcdir(path))
        pp.context.update(DEPTH=self.get_depth(path))
        if extra:
            pp.context.update(extra)
        pp.do_filter('attemptSubstitution')
        pp.setMarker(None)

        pp.out = output
        return pp
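_get_preprocessor() above pairs the 'attemptSubstitution' filter with setMarker(None), which suits config-file templates: known @VAR@ tokens are expanded, directive processing is switched off, and everything else passes through. Here is a minimal sketch of that behaviour with invented file content and values, using only the Preprocessor calls already shown in these examples.

import io

from mozbuild.preprocessor import Preprocessor

pp = Preprocessor()
pp.context.update(top_srcdir='/builds/worker/checkouts/gecko')  # invented value
pp.do_filter('attemptSubstitution')
pp.setMarker(None)  # no directive marker: '#' lines are ordinary text
pp.out = io.StringIO()

config_in = io.StringIO(
    '# passed through verbatim, not treated as a directive\n'
    'srcdir = @top_srcdir@\n'
    'unknown = @NOT_DEFINED@\n'  # attemptSubstitution does not raise on this
)
config_in.name = 'config.status.in'  # do_include wants a named file-like object
pp.do_include(config_in)
print(pp.out.getvalue())  # @top_srcdir@ is expanded; the undefined token does not abort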
Example #20
def main():
    parser = argparse.ArgumentParser(
        description="Find duplicate files in directory.")
    parser.add_argument(
        "--warning",
        "-w",
        action="store_true",
        help="Only warn about duplicates, do not exit with an error",
    )
    parser.add_argument(
        "--file",
        "-f",
        action="append",
        dest="dupes_files",
        default=[],
        help="Add exceptions to the duplicate list from this file",
    )
    parser.add_argument("-D", action=DefinesAction)
    parser.add_argument("-U", action="append", default=[])
    parser.add_argument("directory",
                        help="The directory to check for duplicates in")

    args = parser.parse_args()

    allowed_dupes = []
    for filename in args.dupes_files:
        pp = Preprocessor()
        pp.context.update(buildconfig.defines["ALLDEFINES"])
        if args.D:
            pp.context.update(args.D)
        for undefine in args.U:
            if undefine in pp.context:
                del pp.context[undefine]
        pp.out = StringIO()
        pp.do_filter("substitution")
        pp.do_include(filename)
        allowed_dupes.extend([
            line.partition("#")[0].rstrip()
            for line in pp.out.getvalue().splitlines()
        ])

    find_dupes(args.directory,
               bail=not args.warning,
               allowed_dupes=allowed_dupes)
Example #21
def main(output, input_file):
    with open(input_file) as fh:
        if buildconfig.substs['EXPAND_LIBS_LIST_STYLE'] == 'linkerscript':
            def cleanup(line):
                assert line.startswith('INPUT("')
                assert line.endswith('")')
                return line[len('INPUT("'):-len('")')]

            objs = [cleanup(l.strip()) for l in fh.readlines()]
        else:
            objs = [l.strip() for l in fh.readlines()]

    pp = Preprocessor()
    pp.out = StringIO()
    pp.do_include(os.path.join(buildconfig.topobjdir, 'buildid.h'))
    buildid = pp.context['MOZ_BUILDID']
    output.write(
        'extern const char gToolkitBuildID[] = "%s";' % buildid
    )
    return set(o for o in objs
               if os.path.splitext(os.path.basename(o))[0] != 'buildid')
Example #22
def load_yaml(yaml_path):
    # First invoke preprocessor.py so that we can use #ifdef JS_SIMULATOR in
    # the YAML file.
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])
    pp.out = six.StringIO()
    pp.do_filter('substitution')
    pp.do_include(yaml_path)
    contents = pp.out.getvalue()

    # Load into an OrderedDict to ensure order is preserved. Note: Python 3.7+
    # also preserves ordering for normal dictionaries.
    # Code based on https://stackoverflow.com/a/21912744.
    class OrderedLoader(yaml.Loader):
        pass

    def construct_mapping(loader, node):
        loader.flatten_mapping(node)
        return OrderedDict(loader.construct_pairs(node))

    tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
    OrderedLoader.add_constructor(tag, construct_mapping)
    return yaml.load(contents, OrderedLoader)
Example #23
    def _consume_jar_manifest(self, obj):
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - L10n support
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        if obj.defines:
            pp.context.update(obj.defines.defines)
        pp.context.update(self.environment.defines)
        pp.context.update(
            AB_CD='en-US',
            BUILD_FASTER=1,
        )
        pp.out = JarManifestParser()
        try:
            pp.do_include(obj.path.full_path)
        except DeprecatedJarManifest as e:
            raise DeprecatedJarManifest(
                'Parsing error while processing %s: %s' %
                (obj.path.full_path, e.message))
        self.backend_input_files |= pp.includes

        for jarinfo in pp.out:
            jar_context = Context(allowed_variables=VARIABLES,
                                  config=obj._context.config)
            jar_context.push_source(obj._context.main_path)
            jar_context.push_source(obj.path.full_path)

            install_target = obj.install_target
            if jarinfo.base:
                install_target = mozpath.normpath(
                    mozpath.join(install_target, jarinfo.base))
            jar_context['FINAL_TARGET'] = install_target
            if obj.defines:
                jar_context['DEFINES'] = obj.defines.defines
            files = jar_context['FINAL_TARGET_FILES']
            files_pp = jar_context['FINAL_TARGET_PP_FILES']

            for e in jarinfo.entries:
                if e.is_locale:
                    if jarinfo.relativesrcdir:
                        src = '/%s' % jarinfo.relativesrcdir
                    else:
                        src = ''
                    src = mozpath.join(src, 'en-US', e.source)
                else:
                    src = e.source

                src = Path(jar_context, src)

                if '*' not in e.source and not os.path.exists(src.full_path):
                    if e.is_locale:
                        raise Exception('%s: Cannot find %s' %
                                        (obj.path, e.source))
                    if e.source.startswith('/'):
                        src = Path(jar_context, '!' + e.source)
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = Path(obj._context, '!' + e.source)

                output_basename = mozpath.basename(e.output)
                if output_basename != src.target_basename:
                    src = RenamedSourcePath(jar_context,
                                            (src, output_basename))
                path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))

                if e.preprocess:
                    if '*' in e.source:
                        raise Exception('%s: Wildcards are not supported with '
                                        'preprocessing' % obj.path)
                    files_pp[path] += [src]
                else:
                    files[path] += [src]

            if files:
                self.consume_object(FinalTargetFiles(jar_context, files))
            if files_pp:
                self.consume_object(
                    FinalTargetPreprocessedFiles(jar_context, files_pp))

            for m in jarinfo.chrome_manifests:
                entry = parse_manifest_line(
                    mozpath.dirname(jarinfo.name),
                    m.replace('%',
                              mozpath.basename(jarinfo.name) + '/'))
                self.consume_object(
                    ChromeManifestEntry(jar_context,
                                        '%s.manifest' % jarinfo.name, entry))
Example #24
    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
Example #25
def generate_symbols_file(output, *args):
    ''' '''
    parser = argparse.ArgumentParser()
    parser.add_argument('input')
    parser.add_argument('-D', action=DefinesAction)
    parser.add_argument('-U', action='append', default=[])
    args = parser.parse_args(args)
    input = os.path.abspath(args.input)

    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])
    if args.D:
        pp.context.update(args.D)
    for undefine in args.U:
        if undefine in pp.context:
            del pp.context[undefine]
    # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'
    # Ensure @DATA@ works as expected (see the Windows section further below)
    if buildconfig.substs['OS_TARGET'] == 'WINNT':
        pp.context['DATA'] = 'DATA'
    else:
        pp.context['DATA'] = ''
    pp.out = StringIO()
    pp.do_filter('substitution')
    pp.do_include(input)

    symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]

    if buildconfig.substs['OS_TARGET'] == 'WINNT':
        # A def file is generated for MSVC link.exe that looks like the
        # following:
        # LIBRARY library.dll
        # EXPORTS
        #   symbol1
        #   symbol2
        #   ...
        #
        # link.exe however requires special markers for data symbols, so in
        # that case the symbols look like:
        #   data_symbol1 DATA
        #   data_symbol2 DATA
        #   ...
        #
        # In the input file, this is just annotated with the following syntax:
        #   data_symbol1 @DATA@
        #   data_symbol2 @DATA@
        #   ...
        # The DATA variable is "simply" expanded by the preprocessor, to
        # nothing on non-Windows, such that we only get the symbol name on
        # those platforms, and to DATA on Windows, so that the "DATA" part
        # is, in fact, part of the symbol name as far as the symbols variable
        # is concerned.
        libname, ext = os.path.splitext(os.path.basename(output.name))
        assert ext == '.def'
        output.write('LIBRARY %s\nEXPORTS\n  %s\n' %
                     (libname, '\n  '.join(symbols)))
    elif buildconfig.substs['GCC_USE_GNU_LD']:
        # A linker version script is generated for GNU LD that looks like the
        # following:
        # {
        # global:
        #   symbol1;
        #   symbol2;
        #   ...
        # local:
        #   *;
        # };
        output.write('{\nglobal:\n  %s;\nlocal:\n  *;\n};' %
                     ';\n  '.join(symbols))
    elif buildconfig.substs['OS_TARGET'] == 'Darwin':
        # A list of symbols is generated for Apple ld that simply lists all
        # symbols, with an underscore prefix.
        output.write(''.join('_%s\n' % s for s in symbols))

    return set(pp.includes)
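The comment block in generate_symbols_file() explains the @DATA@ trick: the DATA context entry expands to the literal word DATA on Windows and to nothing elsewhere, so the annotation only survives where link.exe needs it. A minimal sketch of just that expansion (symbol names invented, no buildconfig involved):

import io

from mozbuild.preprocessor import Preprocessor

symbols_in = io.StringIO('my_function\nmy_data_symbol @DATA@\n')
symbols_in.name = 'symbols.in'  # do_include wants a named file-like object

pp = Preprocessor()
pp.context['DATA'] = 'DATA'  # the WINNT branch above; '' on other targets
pp.out = io.StringIO()
pp.do_filter('substitution')
pp.do_include(symbols_in)
print(pp.out.getvalue())
# my_function
# my_data_symbol DATA
# (with pp.context['DATA'] = '' the annotation expands to nothing)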
Example #26
def parse_defines(paths):
    pp = Preprocessor()
    for path in paths:
        pp.do_include(path)

    return pp.context
Example #27
def main(args):
    pp = Preprocessor()
    pp.handleCommandLine(args, True)
Example #28
def generate(output, *args):
    pp = Preprocessor()
    pp.out = output
    pp.handleCommandLine(list(args), True)
    return set(pp.includes)
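generate() above is the generic entry point used for GENERATED_FILES-style preprocessing: `output` is an open destination file and the remaining arguments are ordinary preprocessor flags plus the input path, all handed to handleCommandLine(). A hypothetical call follows; the flag and file contents are invented, and the input is written to a temporary file so the snippet can run on its own (it assumes the generate() definition above and an importable mozbuild).

import io
import os
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.in', delete=False) as tmp:
    tmp.write('#ifdef ENABLE_FOO\nfoo is enabled\n#endif\n')
    input_path = tmp.name

out = io.StringIO()
includes = generate(out, '-DENABLE_FOO', input_path)
print(out.getvalue())    # "foo is enabled\n"
print(sorted(includes))  # contains at least the temporary input file
os.unlink(input_path)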
Example #29
         TINT_FOREGROUND_DRAWABLE=1),
    dict(VIEW_NAME_SUFFIX='LinearLayout',
         BASE_TYPE='android.widget.LinearLayout'),
    dict(VIEW_NAME_SUFFIX='RelativeLayout',
         BASE_TYPE='android.widget.RelativeLayout',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='TextSwitcher',
         BASE_TYPE='android.widget.TextSwitcher'),
    dict(VIEW_NAME_SUFFIX='TextView',
         BASE_TYPE='android.widget.TextView',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='View',
         BASE_TYPE='android.view.View',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='ListView',
         BASE_TYPE='android.widget.ListView',
         STYLE_CONSTRUCTOR=1),
    dict(VIEW_NAME_SUFFIX='ProgressBar',
         BASE_TYPE='android.widget.ProgressBar',
         STYLE_CONSTRUCTOR=1),
]

for view in views:
    pp = Preprocessor(defines=view, marker='//#')

    dest = os.path.join(__DIR__, dest_format_string % view)
    with open(template, 'rU') as input:
        with open(dest, 'wt') as output:
            pp.processFile(input=input, output=output)
            print('%s' % dest)
Example #30
    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
        self.tempnam = os.tempnam('.')