Example #1
    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed."""
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with open(f, 'rb') as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state['global_depends'].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state['global_depends'][f]:
                return True, current_hashes

        return False, current_hashes
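The hashing above only works if iter_modules_in_path reliably reports every .py file the build depends on. The helper itself isn't shown on this page, but the tests later on (Examples 5, 6 and 8) pin down its contract: it yields absolute paths to the source files of already-imported modules that live under the given directories. A minimal sketch consistent with that contract (hedged; the real mozbuild.pythonutil implementation may differ in detail):

import os
import sys

def iter_modules_in_path(*paths):
    # Simplified stand-in for mozbuild.pythonutil.iter_modules_in_path:
    # yield the .py source file of every imported module under |paths|.
    roots = [os.path.abspath(p) for p in paths]
    for module in list(sys.modules.values()):
        sourcefile = getattr(module, '__file__', None)
        if not sourcefile:
            continue  # built-ins and C extensions have no source file
        if sourcefile.endswith(('.pyc', '.pyo')):
            sourcefile = sourcefile[:-1]  # map bytecode back to source
        sourcefile = os.path.abspath(sourcefile)
        if any(sourcefile == r or sourcefile.startswith(r + os.sep)
               for r in roots):
            yield sourcefile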
Example #2
    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed."""
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(
            iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with open(f, 'rb') as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state['global_depends'].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state['global_depends'][f]:
                return True, current_hashes

        return False, current_hashes
Example #3
def generate_binding_files(config, outputprefix, srcprefix, webidlfile,
                           generatedEventsWebIDLFiles):
    """
    |config| is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    if webidlfile in generatedEventsWebIDLFiles:
        eventName = webidlfile[:-len(".webidl")]
        generatedEvent = CGEventRoot(config, eventName)
        replaceFileIfChanged(eventName + ".h", generatedEvent.declare())
        replaceFileIfChanged(eventName + ".cpp", generatedEvent.define())

    mk = Makefile()
    # NOTE: it's VERY important that we output dependencies for the FooBinding
    # file here, not for the header or generated cpp file.  These dependencies
    # are used later to properly determine changedDeps and prevent rebuilding
    # too much.  See the comment explaining $(binding_dependency_trackers) in
    # Makefile.in.
    rule = mk.create_rule([outputprefix])
    rule.add_dependencies(
        os.path.join(srcprefix, x) for x in sorted(root.deps()))
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)
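generate_binding_files leans on mozbuild.makeutil's Makefile to emit the .pp dependency file. A minimal usage sketch; Makefile, create_rule, add_dependencies and dump all appear in the example itself, while the exact dumped output shape is an assumption:

import sys
from mozbuild.makeutil import Makefile  # assumes mozbuild is importable

mk = Makefile()
rule = mk.create_rule(['FooBinding'])  # the tracker target, not the .h/.cpp
rule.add_dependencies(['dom/webidl/Foo.webidl', 'dom/bindings/Codegen.py'])
mk.dump(sys.stdout)
# Expected shape (exact formatting may vary by mozbuild version):
#   FooBinding: dom/webidl/Foo.webidl dom/bindings/Codegen.py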
Example #4
def generate_binding_files(config, outputprefix, srcprefix, webidlfile,
                           generatedEventsWebIDLFiles):
    """
    |config| is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    if webidlfile in generatedEventsWebIDLFiles:
        eventName = webidlfile[:-len(".webidl")]
        generatedEvent = CGEventRoot(config, eventName)
        replaceFileIfChanged(eventName + ".h", generatedEvent.declare())
        replaceFileIfChanged(eventName + ".cpp", generatedEvent.define())

    mk = Makefile()
    # NOTE: it's VERY important that we output dependencies for the FooBinding
    # file here, not for the header or generated cpp file.  These dependencies
    # are used later to properly determine changedDeps and prevent rebuilding
    # too much.  See the comment explaining $(binding_dependency_trackers) in
    # Makefile.in.
    rule = mk.create_rule([outputprefix])
    rule.add_dependencies(os.path.join(srcprefix, x) for x in root.deps())
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)
Example #5
def test_iter_modules_in_path():
    tests_path = os.path.normcase(os.path.dirname(__file__))
    paths = list(iter_modules_in_path(tests_path))
    assert set(paths) == set([
        os.path.join(os.path.abspath(tests_path), "__init__.py"),
        os.path.join(os.path.abspath(tests_path), "test_pythonutil.py"),
    ])
Example #6
 def test_iter_modules_in_path(self):
     mozbuild_path = os.path.dirname(os.path.dirname(__file__))
     paths = list(iter_modules_in_path(mozbuild_path))
     self.assertEquals(sorted(paths), [
         os.path.join(os.path.abspath(mozbuild_path), '__init__.py'),
         os.path.join(os.path.abspath(mozbuild_path), 'pythonutil.py'),
         os.path.join(os.path.abspath(mozbuild_path), 'test', 'test_pythonutil.py'),
     ])
Example #7
def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
            xpcrs_dir, xpt_dir, deps_dir, module, idl_files):
    p = IDLParser(outputdir=cache_dir)

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    glbl = {}
    execfile(bindings_conf, glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(six.ensure_text(s) for s in
                          iter_modules_in_path(topsrcdir))

    for path in idl_files:
        basename = os.path.basename(path)
        stem, _ = os.path.splitext(basename)
        idl_data = open(path).read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve(inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(six.ensure_text(s) for s in idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
    # number of different changes in the code, which may not cause the .xpt
    # files to be changed in any way. This means that make will re-run us every
    # time a build is run whether or not anything changed. To fix this we
    # unconditionally write out the file.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with open(xpt_path, 'w') as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([six.ensure_text(xpt_path)])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
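The NOTE near the end of Example 7 hinges on FileAvoidWrite's central property: it buffers output and only touches the destination when the contents actually change, which preserves mtimes and keeps make from cascading rebuilds. A small demonstration of that property (hedged; assumes mozbuild.util is importable):

import os
from mozbuild.util import FileAvoidWrite

with FileAvoidWrite('example.h') as fh:
    fh.write('generated contents\n')
before = os.stat('example.h').st_mtime

# Re-writing identical contents leaves the file, and its mtime, alone,
# so timestamp-based tools see nothing to rebuild.
with FileAvoidWrite('example.h') as fh:
    fh.write('generated contents\n')
assert os.stat('example.h').st_mtime == before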
Example #8
 def test_iter_modules_in_path(self):
     mozbuild_path = os.path.normcase(
         os.path.dirname(os.path.dirname(__file__)))
     paths = list(iter_modules_in_path(mozbuild_path))
     self.assertEquals(sorted(paths), [
         os.path.join(os.path.abspath(mozbuild_path), '__init__.py'),
         os.path.join(os.path.abspath(mozbuild_path), 'pythonutil.py'),
         os.path.join(os.path.abspath(mozbuild_path), 'test',
                      'test_pythonutil.py'),
     ])
Example #9
def process(input_dir, inc_paths, bindings_conf, cache_dir, header_dir,
            xpcrs_dir, xpt_dir, deps_dir, module, stems):
    p = IDLParser(outputdir=cache_dir)

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    glbl = {}
    execfile(bindings_conf, glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(iter_modules_in_path(topsrcdir))

    for stem in stems:
        path = os.path.join(input_dir, '%s.idl' % stem)
        idl_data = open(path).read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve([input_dir] + inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with FileAvoidWrite(xpt_path) as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([xpt_path])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
Example #10
def process(input_dir, inc_paths, cache_dir, header_dir, xpcrs_dir,
            xpt_dir, deps_dir, module, stems):
    p = IDLParser(outputdir=cache_dir)

    xpts = {}
    mk = Makefile()
    rule = mk.create_rule()

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(iter_modules_in_path(topsrcdir))

    for stem in stems:
        path = os.path.join(input_dir, '%s.idl' % stem)
        idl_data = open(path).read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve([input_dir] + inc_paths, p)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpt = BytesIO()
        write_typelib(idl, xpt, path)
        xpt.seek(0)
        xpts[stem] = xpt

        rule.add_dependencies(idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    # TODO use FileAvoidWrite once it supports binary mode.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    xpt_link(xpts.values()).write(xpt_path)

    rule.add_targets([xpt_path])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
Example #11
def generate_binding_files(config, outputprefix, srcprefix, webidlfile):
    """
    |config| is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    mk = Makefile()
    rule = mk.create_rule([outputprefix + '.h', outputprefix + '.cpp'])
    rule.add_dependencies(os.path.join(srcprefix, x) for x in root.deps())
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)
Example #12
def generate_binding_files(config, outputprefix, srcprefix, webidlfile):
    """
    |config| is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    mk = Makefile()
    rule = mk.create_rule([outputprefix + ".h", outputprefix + ".cpp"])
    rule.add_dependencies(os.path.join(srcprefix, x) for x in root.deps())
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, "w") as f:
        mk.dump(f)
Example #13
def config_status(config, execute=True):
    # Sanitize config data to feed config.status
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved, that it's easier to keep config.status
    # untouched for now.
    def sanitize_config(v):
        if v is True:
            return "1"
        if v is False:
            return ""
        # Serialize types that look like lists and tuples as lists.
        if not isinstance(v, (bytes, six.text_type, dict)) and isinstance(
                v, Iterable):
            return list(v)
        return v

    sanitized_config = {}
    sanitized_config["substs"] = {
        k: sanitize_config(v)
        for k, v in six.iteritems(config) if k not in (
            "DEFINES",
            "TOPSRCDIR",
            "TOPOBJDIR",
            "CONFIG_STATUS_DEPS",
            "OLD_CONFIGURE_SUBSTS",
            "OLD_CONFIGURE_DEFINES",
        )
    }
    for k, v in config["OLD_CONFIGURE_SUBSTS"]:
        sanitized_config["substs"][k] = sanitize_config(v)
    sanitized_config["defines"] = {
        k: sanitize_config(v)
        for k, v in six.iteritems(config["DEFINES"])
    }
    for k, v in config["OLD_CONFIGURE_DEFINES"]:
        sanitized_config["defines"][k] = sanitize_config(v)
    sanitized_config["topsrcdir"] = config["TOPSRCDIR"]
    sanitized_config["topobjdir"] = config["TOPOBJDIR"]
    sanitized_config["mozconfig"] = config.get("MOZCONFIG")

    if not check_unicode(sanitized_config):
        print("Configuration should be all unicode.", file=sys.stderr)
        print("Please file a bug for the above.", file=sys.stderr)
        sys.exit(1)

    # Some values in sanitized_config also have more complex types, such as
    # EnumString, which would currently break the build if passed to
    # config_status as-is, as well as making it inconsistent with re-running
    # config.status, for which they are normalized to plain strings via
    # indented_repr. Likewise for non-dict, non-string iterables, which are
    # converted to lists.
    def normalize(obj):
        if isinstance(obj, dict):
            return {k: normalize(v) for k, v in six.iteritems(obj)}
        if isinstance(obj, six.text_type):
            return six.text_type(obj)
        if isinstance(obj, Iterable):
            return [normalize(o) for o in obj]
        return obj

    sanitized_config = normalize(sanitized_config)

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    with codecs.open("config.status", "w", "utf-8") as fh:
        fh.write(
            textwrap.dedent("""\
            #!%(python)s
            # coding=utf-8
            from __future__ import unicode_literals
        """) % {"python": config["PYTHON3"]})
        for k, v in sorted(six.iteritems(sanitized_config)):
            fh.write("%s = " % k)
            write_indented_repr(fh, v)
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'substs', 'mozconfig']")

        if execute:
            fh.write(
                textwrap.dedent("""
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            """))

    partial_config = PartialConfigEnvironment(config["TOPOBJDIR"])
    partial_config.write_vars(sanitized_config)

    # Write out a file so the build backend knows to re-run configure when
    # relevant Python changes.
    with io.open("config_status_deps.in", "w", encoding="utf-8",
                 newline="\n") as fh:
        for f in sorted(
                itertools.chain(
                    config["CONFIG_STATUS_DEPS"],
                    iter_modules_in_path(config["TOPOBJDIR"],
                                         config["TOPSRCDIR"]),
                )):
            fh.write("%s\n" % mozpath.normpath(f))

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod("config.status", 0o755)
    if execute:
        from mozbuild.config_status import config_status

        return config_status(args=[], **sanitized_config)
    return 0
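The sanitize_config helper exists because config.status values end up consumed with make/shell-style truthiness: True must become "1", False the empty string, and lazy iterables must be frozen into lists before serialization. A standalone restatement of that logic with sample inputs (Iterable is assumed to come from collections.abc, as elsewhere in mozbuild):

import six
from collections.abc import Iterable

def sanitize_config(v):
    if v is True:
        return '1'
    if v is False:
        return ''
    if not isinstance(v, (bytes, six.text_type, dict)) and isinstance(v, Iterable):
        return list(v)
    return v

print(sanitize_config(True))        # '1'
print(sanitize_config(False))       # ''  (empty string is make-false)
print(sanitize_config(('a', 'b')))  # ['a', 'b'] -- tuples serialize as lists
print(sanitize_config({'k': 1}))    # dicts pass through unchanged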
Example #14
def config_status(config):
    # Sanitize config data to feed config.status
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved, that it's easier to keep config.status
    # untouched for now.
    def sanitized_bools(v):
        if v is True:
            return '1'
        if v is False:
            return ''
        return v

    sanitized_config = {}
    sanitized_config['substs'] = {
        k: sanitized_bools(v) for k, v in config.iteritems()
        if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
                     'CONFIG_STATUS_DEPS')
    }
    sanitized_config['defines'] = {
        k: sanitized_bools(v) for k, v in config['DEFINES'].iteritems()
    }
    sanitized_config['non_global_defines'] = config['non_global_defines']
    sanitized_config['topsrcdir'] = config['TOPSRCDIR']
    sanitized_config['topobjdir'] = config['TOPOBJDIR']
    sanitized_config['mozconfig'] = config.get('MOZCONFIG')

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    logging.getLogger('moz.configure').info('Creating config.status')
    encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8'
    with codecs.open('config.status', 'w', encoding) as fh:
        fh.write(textwrap.dedent('''\
            #!%(python)s
            # coding=%(encoding)s
            from __future__ import unicode_literals
            from mozbuild.util import encode
            encoding = '%(encoding)s'
        ''') % {'python': config['PYTHON'], 'encoding': encoding})
        # A lot of the build backend code is currently expecting byte
        # strings and breaks in subtle ways with unicode strings. (bug 1296508)
        for k, v in sanitized_config.iteritems():
            fh.write('%s = encode(%s, encoding)\n' % (k, indented_repr(v)))
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'non_global_defines', 'substs', 'mozconfig']")

        if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
            fh.write(textwrap.dedent('''
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            '''))

    partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
    partial_config.write_vars(sanitized_config)

    # Write out a file so the build backend knows to re-run configure when
    # relevant Python changes.
    with open('config_status_deps.in', 'w') as fh:
        for f in itertools.chain(config['CONFIG_STATUS_DEPS'],
                                 iter_modules_in_path(config['TOPOBJDIR'],
                                                      config['TOPSRCDIR'])):
            fh.write('%s\n' % mozpath.normpath(f))

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod('config.status', 0o755)
    if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
        from mozbuild.config_status import config_status

        # Some values in sanitized_config also have more complex types, such
        # as EnumString, which would currently break the build if passed to
        # config_status as-is, as well as making it inconsistent with
        # re-running config.status. Fortunately, EnumString derives from
        # unicode, so it's covered by converting unicode strings.

        # A lot of the build backend code is currently expecting byte strings
        # and breaks in subtle ways with unicode strings.
        return config_status(args=[], **encode(sanitized_config, encoding))
    return 0
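The config.status generated by Example 14 imports encode from mozbuild.util to convert the whole config back to byte strings for the Python 2-era build backend. The real implementation isn't shown on this page; the following is a hypothetical recursive sketch of its behavior, not the actual mozbuild code:

from collections.abc import Iterable

def encode(obj, encoding='utf-8'):
    # Hypothetical sketch: recursively turn text into byte strings while
    # leaving bytes and non-iterable values alone.
    if isinstance(obj, dict):
        return {encode(k, encoding): encode(v, encoding)
                for k, v in obj.items()}
    if isinstance(obj, bytes):
        return obj
    if isinstance(obj, str):
        return obj.encode(encoding)
    if isinstance(obj, Iterable):
        return [encode(o, encoding) for o in obj]
    return obj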
Example #15
def main(argv):
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('additional_arguments', metavar='arg',
                        nargs=argparse.REMAINDER,
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # The following values indicate a statement of success:
            #  - a set() (see below)
            #  - 0
            #  - False
            #  - None
            #
            # Everything else is an error (so scripts can conveniently |return
            # 1| or similar). If a set is returned, the elements of the set
            # indicate additional dependencies that will be listed in the deps
            # file. Python module imports are automatically included as
            # dependencies.
            if isinstance(ret, set):
                deps = ret
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
            else:
                deps = set()

            # Only write out the dependencies if the script was successful
            if not ret:
                # Add dependencies on any python modules that were imported by
                # the script.
                deps |= set(iter_modules_in_path(buildconfig.topsrcdir,
                                                 buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(deps)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except:
            print('Error processing file "{0}"'.format(args.output_file),
                  file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
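Example 15's contract for generator scripts is spelled out in its comments: the named method receives the FileAvoidWrite for the output plus any extra arguments, and returning a set signals success while listing extra dependencies for the deps file. A hypothetical script satisfying that contract (example_script.py, main and input_path are illustrative names, not part of the build system):

# example_script.py -- a hypothetical generator for the runner above.

def main(output, input_path):
    # |output| is the FileAvoidWrite for the output file.
    with open(input_path) as fh:
        data = fh.read()
    output.write('// generated from %s\n%s' % (input_path, data))
    # Returning a set signals success; its members are recorded as extra
    # dependencies alongside the auto-detected Python module imports.
    return {input_path}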
Example #16
syncMsgList = options.syncMsgList
headersdir = options.headersdir
cppdir = options.cppdir
includedirs = [os.path.abspath(incdir) for incdir in options.includedirs]

if not len(files):
    op.error("No IPDL files specified")

ipcmessagestartpath = os.path.join(headersdir, 'IPCMessageStart.h')
ipc_msgtype_name_path = os.path.join(cppdir, 'IPCMessageTypeName.cpp')

# Compiling the IPDL files can take a long time, even on a fast machine.
# Check to see whether we need to do any work.
latestipdlmod = max(os.stat(f).st_mtime
                    for f in itertools.chain(files,
                                             iter_modules_in_path(mozpath.dirname(__file__))))

def outputModTime(f):
    # A non-existent file is newer than everything.
    if not os.path.exists(f):
        return 0
    return os.stat(f).st_mtime
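
# A hedged sketch of how the two timestamps above plausibly combine (the
# actual comparison lies outside this excerpt): skip the expensive IPDL
# compile when every expected output is at least as new as the newest
# input or compiler module; a missing output reports mtime 0 and so
# always forces regeneration.
#
#   if all(outputModTime(f) >= latestipdlmod for f in expectedoutputs):
#       sys.exit(0)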

# Because the IPDL headers are placed into directories reflecting their
# namespace, collect a list here so we can easily map output names without
# parsing the actual IPDL files themselves.
headersmap = {}
for (path, dirs, headers) in os.walk(headersdir):
    for h in headers:
        base = os.path.basename(h)
        if base in headersmap:
Example #17
def main(argv):
    parser = argparse.ArgumentParser("Generate a file from a Python script", add_help=False)
    parser.add_argument("python_script", metavar="python-script", type=str, help="The Python script to run")
    parser.add_argument("method_name", metavar="method-name", type=str, help="The method of the script to invoke")
    parser.add_argument("output_file", metavar="output-file", type=str, help="The file to generate")
    parser.add_argument(
        "dep_file", metavar="dep-file", type=str, help="File to write any additional make dependencies to"
    )
    parser.add_argument(
        "additional_arguments",
        metavar="arg",
        nargs=argparse.REMAINDER,
        help="Additional arguments to the script's main() method",
    )

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, "r") as fh:
        module = imp.load_module("script", fh, script, (".py", "r", imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method), file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # We treat sets as a statement of success.  Everything else
            # is an error (so scripts can conveniently |return 1| or
            # similar).
            if isinstance(ret, set) and ret:
                ret |= set(iter_modules_in_path(buildconfig.topsrcdir, buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(ret)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except:
            print('Error processing file "{0}"'.format(args.output_file), file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
Example #18
def config_status(config):
    # Sanitize config data to feed config.status
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved, that it's easier to keep config.status
    # untouched for now.
    def sanitized_bools(v):
        if v is True:
            return '1'
        if v is False:
            return ''
        return v

    sanitized_config = {}
    sanitized_config['substs'] = {
        k: sanitized_bools(v) for k, v in six.iteritems(config)
        if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
                     'CONFIG_STATUS_DEPS')
    }
    sanitized_config['defines'] = {
        k: sanitized_bools(v) for k, v in six.iteritems(config['DEFINES'])
    }
    sanitized_config['non_global_defines'] = config['non_global_defines']
    sanitized_config['topsrcdir'] = config['TOPSRCDIR']
    sanitized_config['topobjdir'] = config['TOPOBJDIR']
    sanitized_config['mozconfig'] = config.get('MOZCONFIG')

    if not check_unicode(sanitized_config):
        print("Configuration should be all unicode.", file=sys.stderr)
        print("Please file a bug for the above.", file=sys.stderr)
        sys.exit(1)

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    logging.getLogger('moz.configure').info('Creating config.status')
    with codecs.open('config.status', 'w', 'utf-8') as fh:
        fh.write(textwrap.dedent('''\
            #!%(python)s
            # coding=utf-8
            from __future__ import unicode_literals
        ''') % {'python': config['PYTHON']})
        for k, v in six.iteritems(sanitized_config):
            fh.write('%s = %s\n' % (k, indented_repr(v)))
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'non_global_defines', 'substs', 'mozconfig']")

        if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
            fh.write(textwrap.dedent('''
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            '''))

    partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
    partial_config.write_vars(sanitized_config)

    # Write out a file so the build backend knows to re-run configure when
    # relevant Python changes.
    with open('config_status_deps.in', 'w') as fh:
        for f in itertools.chain(config['CONFIG_STATUS_DEPS'],
                                 iter_modules_in_path(config['TOPOBJDIR'],
                                                      config['TOPSRCDIR'])):
            fh.write('%s\n' % mozpath.normpath(f))

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod('config.status', 0o755)
    if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
        from mozbuild.config_status import config_status

        # Some values in sanitized_config also have more complex types, such
        # as EnumString, which would currently break the build if passed to
        # config_status as-is, as well as making it inconsistent with
        # re-running config.status, for which they are normalized to plain
        # strings via indented_repr. Likewise for non-dict, non-string
        # iterables, which are converted to lists.
        def normalize(obj):
            if isinstance(obj, dict):
                return {
                    k: normalize(v)
                    for k, v in six.iteritems(obj)
                }
            if isinstance(obj, six.text_type):
                return six.text_type(obj)
            if isinstance(obj, Iterable):
                return [normalize(o) for o in obj]
            return obj
        return config_status(args=[], **normalize(sanitized_config))
    return 0
Example #19
def main(argv):
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script',
                        metavar='python-script',
                        type=str,
                        help='The Python script to run')
    parser.add_argument('method_name',
                        metavar='method-name',
                        type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file',
                        metavar='output-file',
                        type=str,
                        help='The file to generate')
    parser.add_argument(
        'dep_file',
        metavar='dep-file',
        type=str,
        help='File to write any additional make dependencies to')
    parser.add_argument(
        'additional_arguments',
        metavar='arg',
        nargs='*',
        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(
            script, method),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # We treat sets as a statement of success.  Everything else
            # is an error (so scripts can conveniently |return 1| or
            # similar).
            if isinstance(ret, set) and ret:
                ret |= set(
                    iter_modules_in_path(buildconfig.topsrcdir,
                                         buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(ret)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except:
            print('Error processing file "{0}"'.format(args.output_file),
                  file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
Example #20
def process(input_dirs, inc_paths, bindings_conf, header_dir, xpcrs_dir,
            xpt_dir, deps_dir, module, idl_files):
    p = IDLParser()

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    glbl = {}
    exec(open(bindings_conf, encoding='utf-8').read(), glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(
        six.ensure_text(s) for s in iter_modules_in_path(topsrcdir))

    for path in idl_files:
        basename = os.path.basename(path)
        stem, _ = os.path.splitext(basename)
        idl_data = open(path, encoding='utf-8').read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve(inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(six.ensure_text(s) for s in idl.deps)

        # The print_* functions don't actually do anything with the
        # passed-in path other than writing it into the file to let people
        # know where the original source was.  This script receives
        # absolute paths, which are not so great to embed in header files
        # (they mess with deterministic generation of files on different
        # machines, Searchfox logic, shared compilation caches, etc.), so
        # we pass in fake paths that are the same across compilations, but
        # should still enable people to figure out where to go.
        relpath = mozpath.relpath(path, topsrcdir)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path, relpath)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, relpath)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, relpath)

    # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
    # number of different changes in the code, which may not cause the .xpt
    # files to be changed in any way. This means that make will re-run us every
    # time a build is run whether or not anything changed. To fix this we
    # unconditionally write out the file.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with open(xpt_path, 'w', encoding='utf-8', newline='\n') as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([six.ensure_text(xpt_path)])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
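Example 20's long comment explains why absolute paths are swapped for tree-relative ones before being embedded in generated files. mozpath here is the conventional mozpack.path alias in mozilla-central; a small illustration (the concrete paths are made up):

import mozpack.path as mozpath  # the usual 'mozpath' alias

topsrcdir = '/builds/worker/checkouts/gecko'    # illustrative value
path = topsrcdir + '/xpcom/base/nsIMemory.idl'  # illustrative value

# Embedding only the tree-relative path keeps generated headers identical
# across machines, objdirs and CI workers.
print(mozpath.relpath(path, topsrcdir))  # -> 'xpcom/base/nsIMemory.idl'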
Example #21
def main(argv):
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('--locale', metavar='locale', type=str,
                        help='The locale in use.')
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('dep_target', metavar='dep-target', type=str,
                        help='Make target to use in the dependencies file')
    parser.add_argument('additional_arguments', metavar='arg',
                        nargs=argparse.REMAINDER,
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    kwargs = {}
    if args.locale:
        kwargs['locale'] = args.locale
    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file, mode='rb') as output:
            try:
                ret = module.__dict__[method](output, *args.additional_arguments, **kwargs)
            except Exception:
                # Ensure that we don't overwrite the file if the script failed.
                output.avoid_writing_to_file()
                raise

            # The following values indicate a statement of success:
            #  - a set() (see below)
            #  - 0
            #  - False
            #  - None
            #
            # Everything else is an error (so scripts can conveniently |return
            # 1| or similar). If a set is returned, the elements of the set
            # indicate additional dependencies that will be listed in the deps
            # file. Python module imports are automatically included as
            # dependencies.
            if isinstance(ret, set):
                deps = ret
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
            else:
                deps = set()

            # Only write out the dependencies if the script was successful
            if not ret:
                # Add dependencies on any python modules that were imported by
                # the script.
                deps |= set(iter_modules_in_path(buildconfig.topsrcdir,
                                                 buildconfig.topobjdir))
                # Add dependencies on any buildconfig items that were accessed
                # by the script.
                deps |= set(buildconfig.get_dependencies())

                mk = Makefile()
                mk.create_rule([args.dep_target]).add_dependencies(deps)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
            else:
                # Ensure that we don't overwrite the file if the script failed.
                output.avoid_writing_to_file()

    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
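Example 21 tightens Example 15's error handling: if the generator raises, avoid_writing_to_file() discards the buffered output so a previously good file isn't clobbered by a partial one. A self-contained sketch of that guard, using an intentionally failing generator (failing_main is an illustrative name):

from mozbuild.util import FileAvoidWrite  # assumes mozbuild is importable

def failing_main(output):
    output.write('partial output that must never reach disk')
    raise RuntimeError('generator blew up')

try:
    with FileAvoidWrite('out.h') as output:
        try:
            failing_main(output)
        except Exception:
            output.avoid_writing_to_file()  # drop the buffered content
            raise
except RuntimeError:
    pass  # out.h is left exactly as it was before the run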
Example #22
def config_status(config):
    # Sanitize config data to feed config.status
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved, that it's easier to keep config.status
    # untouched for now.
    def sanitized_bools(v):
        if v is True:
            return '1'
        if v is False:
            return ''
        return v

    sanitized_config = {}
    sanitized_config['substs'] = {
        k: sanitized_bools(v)
        for k, v in config.iteritems()
        if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
                     'ALL_CONFIGURE_PATHS')
    }
    sanitized_config['defines'] = {
        k: sanitized_bools(v)
        for k, v in config['DEFINES'].iteritems()
    }
    sanitized_config['non_global_defines'] = config['non_global_defines']
    sanitized_config['topsrcdir'] = config['TOPSRCDIR']
    sanitized_config['topobjdir'] = config['TOPOBJDIR']
    sanitized_config['mozconfig'] = config.get('MOZCONFIG')

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    print("Creating config.status", file=sys.stderr)
    encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8'
    with codecs.open('config.status', 'w', encoding) as fh:
        fh.write(
            textwrap.dedent('''\
            #!%(python)s
            # coding=%(encoding)s
            from __future__ import unicode_literals
            from mozbuild.util import encode
            encoding = '%(encoding)s'
        ''') % {
                'python': config['PYTHON'],
                'encoding': encoding
            })
        # A lot of the build backend code is currently expecting byte
        # strings and breaks in subtle ways with unicode strings. (bug 1296508)
        for k, v in sanitized_config.iteritems():
            fh.write('%s = encode(%s, encoding)\n' % (k, indented_repr(v)))
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'non_global_defines', 'substs', 'mozconfig']")

        if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
            fh.write(
                textwrap.dedent('''
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            '''))

    partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
    partial_config.write_vars(sanitized_config)

    # Write out a depfile so Make knows to re-run configure when relevant Python
    # changes.
    mk = Makefile()
    rule = mk.create_rule()
    rule.add_targets(["%s/config.status" % config['TOPOBJDIR']])
    rule.add_dependencies(
        itertools.chain(
            config['ALL_CONFIGURE_PATHS'],
            iter_modules_in_path(config['TOPOBJDIR'], config['TOPSRCDIR'])))
    with open('configure.d', 'w') as fh:
        mk.dump(fh)

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod('config.status', 0o755)
    if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
        from mozbuild.config_status import config_status

        # Some values in sanitized_config also have more complex types, such
        # as EnumString, which would currently break the build if passed to
        # config_status as-is, as well as making it inconsistent with
        # re-running config.status. Fortunately, EnumString derives from
        # unicode, so it's covered by converting unicode strings.

        # A lot of the build backend code is currently expecting byte strings
        # and breaks in subtle ways with unicode strings.
        return config_status(args=[], **encode(sanitized_config, encoding))
    return 0
Example #23
msgMetadata = options.msgMetadata
headersdir = options.headersdir
cppdir = options.cppdir
includedirs = [os.path.abspath(incdir) for incdir in options.includedirs]

if not len(files):
    op.error("No IPDL files specified")

ipcmessagestartpath = os.path.join(headersdir, 'IPCMessageStart.h')
ipc_msgtype_name_path = os.path.join(cppdir, 'IPCMessageTypeName.cpp')

# Compiling the IPDL files can take a long time, even on a fast machine.
# Check to see whether we need to do any work.
latestipdlmod = max(
    os.stat(f).st_mtime for f in itertools.chain(
        files, iter_modules_in_path(mozpath.dirname(__file__))))


def outputModTime(f):
    # A non-existent file is newer than everything.
    if not os.path.exists(f):
        return 0
    return os.stat(f).st_mtime


# Because the IPDL headers are placed into directories reflecting their
# namespace, collect a list here so we can easily map output names without
# parsing the actual IPDL files themselves.
headersmap = {}
for (path, dirs, headers) in os.walk(headersdir):
    for h in headers: