Exemplo n.º 1
0
    def __init__(self, topsrcdir, topobjdir, dist, group=Grouping.NO,
                 abspaths=False):
        """Create a dependency Makefile writer.

        |topsrcdir|, |topobjdir| and |dist| are the roots used to normalize
        dependency paths.  |group| selects how targets are grouped at dump
        time.  |abspaths| keeps absolute paths instead of substituting the
        $(topsrcdir)/$(DEPTH)/$(DIST) make variable references.
        """
        # Normalize the roots the same way dependency-file paths will be
        # normalized so dictionary lookups match.
        topsrcdir = mozpath.normsep(os.path.normcase(os.path.abspath(topsrcdir)))
        topobjdir = mozpath.normsep(os.path.normcase(os.path.abspath(topobjdir)))
        dist = mozpath.normsep(os.path.normcase(os.path.abspath(dist)))
        if abspaths:
            topsrcdir_value = topsrcdir
            topobjdir_value = topobjdir
            dist_value = dist
        else:
            topsrcdir_value = '$(topsrcdir)'
            topobjdir_value = '$(DEPTH)'
            dist_value = '$(DIST)'

        self._normpaths = {
            topsrcdir: topsrcdir_value,
            topobjdir: topobjdir_value,
            dist: dist_value,
            '$(topsrcdir)': topsrcdir_value,
            '$(DEPTH)': topobjdir_value,
            '$(DIST)': dist_value,
            '$(depth)': topobjdir_value, # normcase may lowercase variable refs when
            '$(dist)': dist_value,       # they are in the original dependency file
            mozpath.relpath(topobjdir, os.curdir): topobjdir_value,
            mozpath.relpath(dist, os.curdir): dist_value,
        }
        try:
            # mozpath.relpath(topsrcdir, os.curdir) fails when the source
            # directory and the object directory are not on the same drive on
            # Windows. In that case, the value is not useful in
            # self._normpaths anyway.
            self._normpaths[mozpath.relpath(topsrcdir, os.curdir)] = topsrcdir_value
        except ValueError:
            pass

        Makefile.__init__(self)
        self._group = group
        self._targets = OrderedDict()
Exemplo n.º 2
0
def generate_binding_files(config, outputprefix, srcprefix, webidlfile,
                           generatedEventsWebIDLFiles):
    """
    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    if webidlfile in generatedEventsWebIDLFiles:
        eventName = webidlfile[:-len(".webidl")]
        generatedEvent = CGEventRoot(config, eventName)
        replaceFileIfChanged(eventName + ".h", generatedEvent.declare())
        replaceFileIfChanged(eventName + ".cpp", generatedEvent.define())

    mk = Makefile()
    # NOTE: it's VERY important that we output dependencies for the FooBinding
    # file here, not for the header or generated cpp file.  These dependencies
    # are used later to properly determine changedDeps and prevent rebuilding
    # too much.  See the comment explaining $(binding_dependency_trackers) in
    # Makefile.in.
    rule = mk.create_rule([outputprefix])
    # Sort the dependencies so the .pp file contents are deterministic across
    # runs; root.deps() ordering is otherwise not guaranteed.
    rule.add_dependencies(
        os.path.join(srcprefix, x) for x in sorted(root.deps()))
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)
Exemplo n.º 3
0
def generate_binding_files(config, outputprefix, srcprefix, webidlfile,
                           generatedEventsWebIDLFiles):
    """
    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    binding_root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", binding_root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", binding_root.define())

    if webidlfile in generatedEventsWebIDLFiles:
        event_stem = webidlfile[:-len(".webidl")]
        event_root = CGEventRoot(config, event_stem)
        replaceFileIfChanged(event_stem + ".h", event_root.declare())
        replaceFileIfChanged(event_stem + ".cpp", event_root.define())

    # NOTE: it's VERY important that we output dependencies for the FooBinding
    # file here, not for the header or generated cpp file.  These dependencies
    # are used later to properly determine changedDeps and prevent rebuilding
    # too much.  See the comment explaining $(binding_dependency_trackers) in
    # Makefile.in.
    makefile = Makefile()
    tracker_rule = makefile.create_rule([outputprefix])
    tracker_rule.add_dependencies(
        os.path.join(srcprefix, dep) for dep in sorted(binding_root.deps()))
    tracker_rule.add_dependencies(iter_modules_in_path(topsrcdir))

    deps_path = ".deps/" + outputprefix + ".pp"
    with open(deps_path, 'w') as deps_file:
        makefile.dump(deps_file)
Exemplo n.º 4
0
 def gen_depfile(self, name, rules):
     """Render *rules* (a target -> dependencies mapping) into an in-memory
     depfile whose .name attribute is set to *name*."""
     makefile = Makefile()
     for target, deps in rules.items():
         rule = makefile.create_rule([target])
         rule.add_dependencies(deps)
     buf = StringIO()
     # removal_guard emits extra empty rules so removed deps don't break make.
     makefile.dump(buf, removal_guard=True)
     buf.seek(0)
     buf.name = name
     return buf
Exemplo n.º 5
0
def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
            xpcrs_dir, xpt_dir, deps_dir, module, idl_files):
    """Compile |idl_files|: emit a C++ header and Rust bindings per file, a
    linked .xpt typelib for |module|, and, when |deps_dir| is set, a make
    dependency (.pp) file recording everything the outputs depend on."""
    p = IDLParser(outputdir=cache_dir)

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    glbl = {}
    # exec() the bindings configuration rather than the Python-2-only
    # execfile() so this also runs under Python 3.
    with open(bindings_conf) as conf:
        exec(compile(conf.read(), bindings_conf, 'exec'), glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(six.ensure_text(s) for s in
                          iter_modules_in_path(topsrcdir))

    for path in idl_files:
        basename = os.path.basename(path)
        stem, _ = os.path.splitext(basename)
        # Close the input file promptly instead of relying on GC.
        with open(path) as idl_file:
            idl_data = idl_file.read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve(inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(six.ensure_text(s) for s in idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
    # number of different changes in the code, which may not cause the .xpt
    # files to be changed in any way. This means that make will re-run us every
    # time a build is run whether or not anything changed. To fix this we
    # unconditionally write out the file.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with open(xpt_path, 'w') as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([six.ensure_text(xpt_path)])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
 def test_path_normalization(self):
     """Backslashes in targets and dependencies are dumped as forward
     slashes; command text keeps its backslashes."""
     out = StringIO()
     mk = Makefile()
     rule = mk.create_rule(['c:\\foo'])
     rule.add_dependencies(['c:\\bar', 'c:\\baz\\qux'])
     rule.add_commands(['echo c:\\foo'])
     mk.dump(out)
     # The trailing 'c:/bar c:/baz/qux:' rule is the removal guard for deps
     # that are not themselves targets.
     self.assertEqual(
         out.getvalue(), 'c:/foo: c:/bar c:/baz/qux\n' +
         '\techo c:\\foo\n' + 'c:/bar c:/baz/qux:\n')
Exemplo n.º 7
0
Arquivo: cl.py Projeto: yzhang90/jsmop
def InvokeClWithDependencyGeneration(cmdline):
    """Run cl with -showIncludes appended and record every included header as
    a make dependency of the -Fo target, dumped to .deps/<target>.pp.

    |cmdline| is the full cl command line; the object file must be named via
    -Fo<target> and the source file is assumed to be the last argument.
    Exits the process with cl's status on failure.
    """
    target = None
    # Figure out what the target is
    for arg in cmdline:
        if arg.startswith("-Fo"):
            target = arg[3:]
            break

    if target is None:
        # The original initialized target to "" (so the == None test never
        # fired) and wrote `print >>sys.stderr, "..." and sys.exit(1)`, which
        # evaluates sys.exit() *before* printing. Report, then exit.
        sys.stderr.write("No target set\n")
        sys.exit(1)

    # Assume the source file is the last argument
    source = cmdline[-1]
    assert not source.startswith('-')

    # The deps target lives here
    depstarget = os.path.basename(target) + ".pp"

    cmdline += ['-showIncludes']
    cl = subprocess.Popen(cmdline, stdout=subprocess.PIPE)

    mk = Makefile()
    rule = mk.create_rule([target])
    rule.add_dependencies([normcase(source)])
    for line in cl.stdout:
        # cl -showIncludes prefixes every header with "Note: including file:"
        # and an indentation corresponding to the depth (which we don't need)
        if line.startswith(CL_INCLUDES_PREFIX):
            dep = line[len(CL_INCLUDES_PREFIX):].strip()
            # We can't handle pathes with spaces properly in mddepend.pl, but
            # we can assume that anything in a path with spaces is a system
            # header and throw it away.
            dep = normcase(dep)
            if ' ' not in dep:
                rule.add_dependencies([dep])
        else:
            sys.stdout.write(line) # Make sure we preserve the relevant output
                                   # from cl

    ret = cl.wait()
    if ret != 0 or target == "":
        sys.exit(ret)

    depsdir = os.path.normpath(os.path.join(os.curdir, ".deps"))
    depstarget = os.path.join(depsdir, depstarget)
    if not os.path.isdir(depsdir):
        try:
            os.makedirs(depsdir)
        except OSError:
            pass # This suppresses the error we get when the dir exists, at the
                 # cost of masking failure to create the directory.  We'll just
                 # die on the next line though, so it's not that much of a loss.

    with open(depstarget, "w") as f:
        mk.dump(f)
Exemplo n.º 8
0
def main(argv):
    """Generate |output-file| by loading |python-script| and invoking its
    |method-name| with the output file plus any extra arguments.

    Returns None on success (after writing extra make dependencies, as
    reported by the method's returned set, to |dep-file|) and a truthy
    value on failure.
    """
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('additional_arguments', metavar='arg', nargs='*',
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    # Assume failure until the method signals success by returning a set.
    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # We treat sets as a statement of success.  Everything else
            # is an error (so scripts can conveniently |return 1| or
            # similar).
            if isinstance(ret, set) and ret:
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(ret)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
Exemplo n.º 9
0
def InvokeClWithDependencyGeneration(cmdline):
    """Run cl with -showIncludes appended and record every included header as
    a make dependency of the -Fo target, dumped to .deps/<target>.pp.

    |cmdline| is the full cl command line; the object file must be named via
    -Fo<target> and the source file is assumed to be the last argument.
    Exits the process with cl's status on failure.
    """
    target = None
    # Figure out what the target is
    for arg in cmdline:
        if arg.startswith("-Fo"):
            target = arg[3:]
            break

    if target is None:
        # The original initialized target to "" (so the == None test never
        # fired) and its print-and-exit one-liner evaluated sys.exit()
        # *before* printing anything. Report, then exit.
        sys.stderr.write("No target set\n")
        sys.exit(1)

    # Assume the source file is the last argument
    source = cmdline[-1]
    assert not source.startswith('-')

    # The deps target lives here
    depstarget = os.path.basename(target) + ".pp"

    cmdline += ['-showIncludes']
    cl = subprocess.Popen(cmdline, stdout=subprocess.PIPE)

    mk = Makefile()
    # create_rule takes an iterable of target names; passing the bare string
    # (as the original did) would iterate it character by character.
    rule = mk.create_rule([target])
    rule.add_dependencies([normcase(source)])
    for line in cl.stdout:
        # cl -showIncludes prefixes every header with "Note: including file:"
        # and an indentation corresponding to the depth (which we don't need)
        if line.startswith(CL_INCLUDES_PREFIX):
            dep = line[len(CL_INCLUDES_PREFIX):].strip()
            # We can't handle pathes with spaces properly in mddepend.pl, but
            # we can assume that anything in a path with spaces is a system
            # header and throw it away.
            if ' ' not in dep:
                rule.add_dependencies([normcase(dep)])
        else:
            sys.stdout.write(line)  # Make sure we preserve the relevant output
            # from cl

    ret = cl.wait()
    if ret != 0 or target == "":
        sys.exit(ret)

    depsdir = os.path.normpath(os.path.join(os.curdir, ".deps"))
    depstarget = os.path.join(depsdir, depstarget)
    if not os.path.isdir(depsdir):
        try:
            os.makedirs(depsdir)
        except OSError:
            pass  # This suppresses the error we get when the dir exists, at the
            # cost of masking failure to create the directory.  We'll just
            # die on the next line though, so it's not that much of a loss.

    with open(depstarget, "w") as f:
        mk.dump(f)
Exemplo n.º 10
0
 def test_statement(self):
     """Free-form statements are dumped in order, interleaved with rules."""
     out = StringIO()
     mk = Makefile()
     foo_rule = mk.create_rule(["foo"])
     foo_rule.add_dependencies(["bar"])
     foo_rule.add_commands(["echo foo"])
     mk.add_statement("BAR = bar")
     var_rule = mk.create_rule(["$(BAR)"])
     var_rule.add_commands(["echo $@"])
     mk.dump(out, removal_guard=False)
     expected = "foo: bar\n\techo foo\nBAR = bar\n$(BAR):\n\techo $@\n"
     self.assertEqual(out.getvalue(), expected)
Exemplo n.º 11
0
 def test_path_normalization(self):
     """Backslashes in targets and dependencies are dumped as forward
     slashes; command text keeps its backslashes."""
     out = StringIO()
     mk = Makefile()
     rule = mk.create_rule(['c:\\foo'])
     rule.add_dependencies(['c:\\bar', 'c:\\baz\\qux'])
     rule.add_commands(['echo c:\\foo'])
     mk.dump(out)
     # The trailing 'c:/bar c:/baz/qux:' rule is the removal guard for deps
     # that are not themselves targets.
     self.assertEqual(out.getvalue(),
         'c:/foo: c:/bar c:/baz/qux\n' +
         '\techo c:\\foo\n' +
         'c:/bar c:/baz/qux:\n')
Exemplo n.º 12
0
 def test_statement(self):
     """Free-form statements are dumped in order, interleaved with rules."""
     out = StringIO()
     mk = Makefile()
     mk.create_rule(['foo']).add_dependencies(['bar']) \
                            .add_commands(['echo foo'])
     mk.add_statement('BAR = bar')
     mk.create_rule(['$(BAR)']).add_commands(['echo $@'])
     mk.dump(out, removal_guard=False)
     self.assertEqual(
         out.getvalue(), 'foo: bar\n' + '\techo foo\n' + 'BAR = bar\n' +
         '$(BAR):\n' + '\techo $@\n')
Exemplo n.º 13
0
    def test_makefile(self):
        """Rules dump in insertion order; the default removal guard appends
        an empty rule for dependencies that are never targets (hoge, qux)."""
        out = StringIO()
        mk = Makefile()
        rule = mk.create_rule(["foo"])
        rule.add_dependencies(["bar", "baz", "qux"])
        rule.add_commands(["echo foo"])
        rule = mk.create_rule().add_targets(["bar", "baz"])
        rule.add_dependencies(["hoge"])
        rule.add_commands(["echo $@"])
        mk.dump(out, removal_guard=False)
        self.assertEqual(
            out.getvalue(),
            "foo: bar baz qux\n" + "\techo foo\n" + "bar baz: hoge\n" + "\techo $@\n",
        )

        # Same content dumped with the default removal_guard=True gains the
        # trailing 'hoge qux:' guard rule.
        out = StringIO()
        mk.dump(out)
        self.assertEqual(
            out.getvalue(),
            "foo: bar baz qux\n"
            + "\techo foo\n"
            + "bar baz: hoge\n"
            + "\techo $@\n"
            + "hoge qux:\n",
        )
Exemplo n.º 14
0
def process(input_dir, inc_paths, bindings_conf, cache_dir, header_dir,
            xpcrs_dir, xpt_dir, deps_dir, module, stems):
    """Compile the XPIDL files named by |stems| under |input_dir|: emit a C++
    header and Rust bindings per file, a linked .xpt typelib for |module|,
    and, when |deps_dir| is set, a make dependency (.pp) file."""
    p = IDLParser(outputdir=cache_dir)

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    glbl = {}
    # exec() the bindings configuration rather than the Python-2-only
    # execfile() so this also runs under Python 3.
    with open(bindings_conf) as conf:
        exec(compile(conf.read(), bindings_conf, 'exec'), glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(iter_modules_in_path(topsrcdir))

    for stem in stems:
        path = os.path.join(input_dir, '%s.idl' % stem)
        # Close the input file promptly instead of relying on GC.
        with open(path) as idl_file:
            idl_data = idl_file.read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve([input_dir] + inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with FileAvoidWrite(xpt_path) as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([xpt_path])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
Exemplo n.º 15
0
def process(input_dir, inc_paths, cache_dir, header_dir, xpcrs_dir,
            xpt_dir, deps_dir, module, stems):
    """Compile the XPIDL files named by |stems| under |input_dir|: emit a C++
    header and Rust bindings per file, a linked binary .xpt typelib for
    |module|, and, when |deps_dir| is set, a make dependency (.pp) file."""
    p = IDLParser(outputdir=cache_dir)

    xpts = {}
    mk = Makefile()
    rule = mk.create_rule()

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(iter_modules_in_path(topsrcdir))

    for stem in stems:
        path = os.path.join(input_dir, '%s.idl' % stem)
        # Close the input file promptly instead of relying on GC.
        with open(path) as idl_file:
            idl_data = idl_file.read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve([input_dir] + inc_paths, p)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpt = BytesIO()
        write_typelib(idl, xpt, path)
        xpt.seek(0)
        xpts[stem] = xpt

        rule.add_dependencies(idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    # TODO use FileAvoidWrite once it supports binary mode.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    xpt_link(xpts.values()).write(xpt_path)

    rule.add_targets([xpt_path])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
Exemplo n.º 16
0
 def dump(self, fh, removal_guard=True):
     """Write the recorded targets to *fh*, grouped according to self._group.

     BY_DEPFILE: one rule per depfile; dependencies that are themselves
     recorded targets are replaced with their own depfile.
     ALL_TARGETS: one 'all' rule accumulating every target and dependency.
     NO: one rule per target.
     """
     rules = {}
     for t, (depfile, deps) in self._targets.items():
         if self._group == Grouping.BY_DEPFILE:
             if depfile not in rules:
                 rules[depfile] = self.create_rule([depfile])
             rules[depfile].add_dependencies(d if d not in self._targets else self._targets[d][0] for d in deps)
         elif self._group == Grouping.ALL_TARGETS:
             if 'all' not in rules:
                 rules['all'] = self.create_rule()
             rules['all'].add_targets([t]) \
                         .add_dependencies(deps)
         elif self._group == Grouping.NO:
             self.create_rule([t]) \
                 .add_dependencies(deps)
     Makefile.dump(self, fh, removal_guard)
Exemplo n.º 17
0
def generate_binding_files(config, outputprefix, srcprefix, webidlfile):
    """
    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    binding_root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", binding_root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", binding_root.define())

    # Record the WebIDL inputs and the codegen Python modules as make
    # dependencies of both generated files.
    deps_mk = Makefile()
    deps_rule = deps_mk.create_rule([outputprefix + ".h", outputprefix + ".cpp"])
    deps_rule.add_dependencies(
        os.path.join(srcprefix, dep) for dep in binding_root.deps())
    deps_rule.add_dependencies(iter_modules_in_path(topsrcdir))

    deps_path = ".deps/" + outputprefix + ".pp"
    with open(deps_path, "w") as deps_file:
        deps_mk.dump(deps_file)
Exemplo n.º 18
0
def generate_binding_files(config, outputprefix, srcprefix, webidlfile):
    """
    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """

    # The dependency file lives beside the generated sources.
    depsname = ".deps/" + outputprefix + ".pp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    replaceFileIfChanged(outputprefix + ".h", root.declare())
    replaceFileIfChanged(outputprefix + ".cpp", root.define())

    # Record the WebIDL inputs and the codegen Python modules as make
    # dependencies of both generated files.
    mk = Makefile()
    rule = mk.create_rule([outputprefix + '.h', outputprefix + '.cpp'])
    rule.add_dependencies(os.path.join(srcprefix, x) for x in root.deps())
    rule.add_dependencies(iter_modules_in_path(topsrcdir))
    with open(depsname, 'w') as f:
        mk.dump(f)
Exemplo n.º 19
0
 def test_statement(self):
     """Free-form statements keep their position relative to rules."""
     out = StringIO()
     mk = Makefile()
     foo_rule = mk.create_rule(['foo'])
     foo_rule.add_dependencies(['bar'])
     foo_rule.add_commands(['echo foo'])
     mk.add_statement('BAR = bar')
     var_rule = mk.create_rule(['$(BAR)'])
     var_rule.add_commands(['echo $@'])
     mk.dump(out, removal_guard=False)
     expected = ('foo: bar\n'
                 '\techo foo\n'
                 'BAR = bar\n'
                 '$(BAR):\n'
                 '\techo $@\n')
     self.assertEqual(out.getvalue(), expected)
Exemplo n.º 20
0
    def __init__(self,
                 topsrcdir,
                 topobjdir,
                 dist,
                 group=Grouping.NO,
                 abspaths=False):
        """Create a dependency Makefile writer.

        |topsrcdir|, |topobjdir| and |dist| are the roots used to normalize
        dependency paths.  |group| selects how targets are grouped at dump
        time.  |abspaths| keeps absolute paths instead of substituting the
        $(topsrcdir)/$(DEPTH)/$(DIST) make variable references.
        """
        # Normalize the roots the same way dependency-file paths will be
        # normalized so dictionary lookups match.
        topsrcdir = mozpath.normsep(
            os.path.normcase(os.path.abspath(topsrcdir)))
        topobjdir = mozpath.normsep(
            os.path.normcase(os.path.abspath(topobjdir)))
        dist = mozpath.normsep(os.path.normcase(os.path.abspath(dist)))
        if abspaths:
            topsrcdir_value = topsrcdir
            topobjdir_value = topobjdir
            dist_value = dist
        else:
            topsrcdir_value = '$(topsrcdir)'
            topobjdir_value = '$(DEPTH)'
            dist_value = '$(DIST)'

        self._normpaths = {
            topsrcdir: topsrcdir_value,
            topobjdir: topobjdir_value,
            dist: dist_value,
            '$(topsrcdir)': topsrcdir_value,
            '$(DEPTH)': topobjdir_value,
            '$(DIST)': dist_value,
            '$(depth)':
            topobjdir_value,  # normcase may lowercase variable refs when
            '$(dist)': dist_value,  # they are in the original dependency file
            mozpath.relpath(topobjdir, os.curdir): topobjdir_value,
            mozpath.relpath(dist, os.curdir): dist_value,
        }
        try:
            # mozpath.relpath(topsrcdir, os.curdir) fails when source directory
            # and object directory are not on the same drive on Windows. In
            # this case, the value is not useful in self._normpaths anyways.
            self._normpaths[mozpath.relpath(topsrcdir,
                                            os.curdir)] = topsrcdir_value
        except ValueError:
            pass

        Makefile.__init__(self)
        self._group = group
        self._targets = OrderedDict()
Exemplo n.º 21
0
 def dump(self, fh, removal_guard=True):
     """Write the recorded targets to *fh*, grouped according to self._group.

     BY_DEPFILE: one rule per depfile; dependencies that are themselves
     recorded targets are replaced with their own depfile.
     ALL_TARGETS: one 'all' rule accumulating every target and dependency.
     NO: one rule per target.
     """
     rules = {}
     for t, (depfile, deps) in self._targets.items():
         if self._group == Grouping.BY_DEPFILE:
             if depfile not in rules:
                 rules[depfile] = self.create_rule([depfile])
             rules[depfile].add_dependencies(
                 d if d not in self._targets else self._targets[d][0]
                 for d in deps)
         elif self._group == Grouping.ALL_TARGETS:
             if 'all' not in rules:
                 rules['all'] = self.create_rule()
             rules['all'].add_targets([t]) \
                         .add_dependencies(deps)
         elif self._group == Grouping.NO:
             self.create_rule([t]) \
                 .add_dependencies(deps)
     Makefile.dump(self, fh, removal_guard)
Exemplo n.º 22
0
    def __init__(self,
                 topsrcdir,
                 topobjdir,
                 dist,
                 group=Grouping.NO,
                 abspaths=False):
        """Create a dependency Makefile writer.

        |topsrcdir|, |topobjdir| and |dist| are the roots used to normalize
        dependency paths.  |group| selects how targets are grouped at dump
        time.  |abspaths| keeps absolute paths instead of substituting the
        $(topsrcdir)/$(DEPTH)/$(DIST) make variable references.
        """
        # Normalize the roots the same way dependency-file paths will be
        # normalized so dictionary lookups match.
        topsrcdir = mozpath.normsep(
            os.path.normcase(os.path.abspath(topsrcdir)))
        topobjdir = mozpath.normsep(
            os.path.normcase(os.path.abspath(topobjdir)))
        dist = mozpath.normsep(os.path.normcase(os.path.abspath(dist)))
        if abspaths:
            topsrcdir_value = topsrcdir
            topobjdir_value = topobjdir
            dist_value = dist
        else:
            topsrcdir_value = '$(topsrcdir)'
            topobjdir_value = '$(DEPTH)'
            dist_value = '$(DIST)'

        self._normpaths = {
            topsrcdir: topsrcdir_value,
            topobjdir: topobjdir_value,
            dist: dist_value,
            '$(topsrcdir)': topsrcdir_value,
            '$(DEPTH)': topobjdir_value,
            '$(DIST)': dist_value,
            '$(depth)':
            topobjdir_value,  # normcase may lowercase variable refs when
            '$(dist)': dist_value,  # they are in the original dependency file
            mozpath.relpath(topobjdir, os.curdir): topobjdir_value,
            mozpath.relpath(dist, os.curdir): dist_value,
        }
        try:
            # mozpath.relpath(topsrcdir, os.curdir) raises ValueError when the
            # source and object directories are on different Windows drives;
            # the entry would not be useful in self._normpaths in that case.
            self._normpaths[mozpath.relpath(topsrcdir,
                                            os.curdir)] = topsrcdir_value
        except ValueError:
            pass

        Makefile.__init__(self)
        self._group = group
        self._targets = OrderedDict()
Exemplo n.º 23
0
    def __init__(self, topsrcdir, topobjdir, dist, group=Grouping.NO,
                 abspaths=False):
        """Create a dependency Makefile writer.

        |topsrcdir|, |topobjdir| and |dist| are the roots used to normalize
        dependency paths.  |group| selects how targets are grouped at dump
        time.  |abspaths| keeps absolute paths instead of substituting the
        $(topsrcdir)/$(DEPTH)/$(DIST) make variable references.
        """
        # Normalize the roots the same way dependency-file paths will be
        # normalized so dictionary lookups match.
        topsrcdir = mozpath.normsep(os.path.normcase(os.path.abspath(topsrcdir)))
        topobjdir = mozpath.normsep(os.path.normcase(os.path.abspath(topobjdir)))
        dist = mozpath.normsep(os.path.normcase(os.path.abspath(dist)))
        if abspaths:
            topsrcdir_value = topsrcdir
            topobjdir_value = topobjdir
            dist_value = dist
        else:
            topsrcdir_value = '$(topsrcdir)'
            topobjdir_value = '$(DEPTH)'
            dist_value = '$(DIST)'

        self._normpaths = {
            topsrcdir: topsrcdir_value,
            topobjdir: topobjdir_value,
            dist: dist_value,
            '$(topsrcdir)': topsrcdir_value,
            '$(DEPTH)': topobjdir_value,
            '$(DIST)': dist_value,
            '$(depth)': topobjdir_value, # normcase may lowercase variable refs when
            '$(dist)': dist_value,       # they are in the original dependency file
            mozpath.relpath(topobjdir, os.curdir): topobjdir_value,
            mozpath.relpath(dist, os.curdir): dist_value,
        }
        try:
            # mozpath.relpath(topsrcdir, os.curdir) fails when source directory
            # and object directory are not on the same drive on Windows. In
            # this case, the value is not useful in self._normpaths anyways.
            self._normpaths[mozpath.relpath(topsrcdir, os.curdir)] = topsrcdir_value
        except ValueError:
            pass

        Makefile.__init__(self)
        self._group = group
        self._targets = OrderedDict()
Exemplo n.º 24
0
 def gen_depfile(self, name, rules):
     """Render *rules* (a target -> dependencies mapping) into an in-memory
     depfile whose .name attribute is set to *name*."""
     makefile = Makefile()
     for target, deps in rules.items():
         rule = makefile.create_rule([target])
         rule.add_dependencies(deps)
     buf = StringIO()
     # removal_guard emits extra empty rules so removed deps don't break make.
     makefile.dump(buf, removal_guard=True)
     buf.seek(0)
     buf.name = name
     return buf
Exemplo n.º 25
0
    def handleCommandLine(self, args, defaultToStdin=False):
        """
        Parse a commandline into this parser.
        Uses OptionParser internally, no args mean sys.argv[1:].
        """
        def get_output_file(path, encoding=None):
            """Open *path* for writing (utf-8 by default), creating its
            directory if needed."""
            if encoding is None:
                encoding = "utf-8"
            dir = os.path.dirname(path)
            if dir:
                try:
                    os.makedirs(dir)
                except OSError as error:
                    # The directory already existing is fine; anything else
                    # is a real failure.
                    if error.errno != errno.EEXIST:
                        raise
            return io.open(path, "w", encoding=encoding, newline="\n")

        p = self.getCommandLineParser()
        options, args = p.parse_args(args=args)
        out = self.out
        depfile = None

        if options.output:
            out = get_output_file(options.output, options.output_encoding)
        elif options.output_encoding:
            # An encoding is only meaningful together with --output.
            raise Preprocessor.Error(
                self, "--output-encoding doesn't work without --output", None)
        if defaultToStdin and len(args) == 0:
            args = [sys.stdin]
            if options.depend:
                raise Preprocessor.Error(self,
                                         "--depend doesn't work with stdin",
                                         None)
        if options.depend:
            # Dependency tracking needs a named output file to hang the
            # generated make rule on.
            if not options.output:
                raise Preprocessor.Error(self,
                                         "--depend doesn't work with stdout",
                                         None)
            depfile = get_output_file(options.depend)

        if args:
            for f in args:
                with io.open(f, "rU", encoding="utf-8") as input:
                    self.processFile(input=input, output=out)
            if depfile:
                # Emit a make rule making the output depend on every file
                # included while preprocessing.
                mk = Makefile()
                mk.create_rule([six.ensure_text(options.output)
                                ]).add_dependencies(self.includes)
                mk.dump(depfile)
                depfile.close()

        if options.output:
            out.close()
Exemplo n.º 26
0
    def test_makefile(self):
        """Rules dump in insertion order; the default removal guard appends
        an empty rule for dependencies that are never targets (hoge, qux)."""
        out = StringIO()
        mk = Makefile()
        rule = mk.create_rule(['foo'])
        rule.add_dependencies(['bar', 'baz', 'qux'])
        rule.add_commands(['echo foo'])
        rule = mk.create_rule().add_targets(['bar', 'baz'])
        rule.add_dependencies(['hoge'])
        rule.add_commands(['echo $@'])
        mk.dump(out, removal_guard=False)
        self.assertEqual(
            out.getvalue(), 'foo: bar baz qux\n' + '\techo foo\n' +
            'bar baz: hoge\n' + '\techo $@\n')
        # io.StringIO.truncate() does not rewind the stream; without seek(0)
        # the next dump would be written at the old offset and NUL-padded.
        out.seek(0)
        out.truncate(0)

        mk.dump(out)
        self.assertEqual(
            out.getvalue(), 'foo: bar baz qux\n' + '\techo foo\n' +
            'bar baz: hoge\n' + '\techo $@\n' + 'hoge qux:\n')
Exemplo n.º 27
0
    def processFile(self, input, output, depfile=None):
        """
        Preprocesses the contents of the ``input`` stream and writes the result
        to the ``output`` stream. If ``depfile`` is set,  the dependencies of
        ``output`` file are written to ``depfile`` in Makefile format.
        """
        self.out = output

        self.do_include(input, False)
        self.failUnused(input.name)

        if depfile:
            # Emit a make rule: the output depends on every included file.
            depmk = Makefile()
            rule = depmk.create_rule([output.name])
            rule.add_dependencies(self.includes)
            depmk.dump(depfile)
Exemplo n.º 28
0
    def test_makefile(self):
        """Rules dump in insertion order; the default removal guard appends
        an empty rule for dependencies that are never targets (hoge, qux)."""
        out = StringIO()
        mk = Makefile()
        rule = mk.create_rule(['foo'])
        rule.add_dependencies(['bar', 'baz', 'qux'])
        rule.add_commands(['echo foo'])
        rule = mk.create_rule().add_targets(['bar', 'baz'])
        rule.add_dependencies(['hoge'])
        rule.add_commands(['echo $@'])
        mk.dump(out, removal_guard=False)
        self.assertEqual(out.getvalue(),
            'foo: bar baz qux\n' +
            '\techo foo\n' +
            'bar baz: hoge\n' +
            '\techo $@\n')
        # io.StringIO.truncate() does not rewind the stream; without seek(0)
        # the next dump would be written at the old offset and NUL-padded.
        out.seek(0)
        out.truncate(0)

        mk.dump(out)
        self.assertEqual(out.getvalue(),
            'foo: bar baz qux\n' +
            '\techo foo\n' +
            'bar baz: hoge\n' +
            '\techo $@\n' +
            'hoge qux:\n')
Exemplo n.º 29
0
def main(argv):
    """Invoke a method of a Python script to generate an output file.

    Expects ``argv`` = [script, method, output, depfile, extra...].  The
    named method is called with a FileAvoidWrite for the output file plus
    the extra arguments.  A non-empty set returned by the method is taken
    as success and written to ``depfile`` as make dependencies of the
    output; any other return value is propagated as the exit status.
    """
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('additional_arguments', metavar='arg', nargs='*',
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script_path = args.python_script
    # Allow the script to import modules living next to it, mirroring what
    # a plain "python script arg1..." invocation would have provided for
    # free; we invoke the script in a roundabout way, so do it explicitly.
    sys.path.append(os.path.dirname(script_path))
    with open(script_path, 'r') as fh:
        module = imp.load_module('script', fh, script_path,
                                 ('.py', 'r', imp.PY_SOURCE))
    method_name = args.method_name
    if not hasattr(module, method_name):
        print('Error: script "{0}" is missing a {1} method'.format(
            script_path, method_name),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method_name](output,
                                               *args.additional_arguments)
            # A non-empty set is a statement of success; everything else is
            # treated as an error (so scripts can conveniently |return 1|).
            if isinstance(ret, set) and ret:
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(ret)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
                # The script succeeded; reset |ret| for a clean exit status.
                ret = None
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
Exemplo n.º 30
0
def InvokeClWithDependencyGeneration(cmdline):
    """Run the MSVC command |cmdline| and write a .deps/<target>.pp Makefile
    fragment listing every header the compilation included.

    The object file is taken from the -Fo argument; the source file is
    assumed to be the last argument.  Returns the compiler's exit code
    (0 on success, 1 if no -Fo target was given).
    """
    # Figure out what the target is.  Use None (not "") so a missing -Fo
    # argument actually trips the check below: the previous code set "" but
    # compared against None, so the error branch could never fire.
    target = None
    for arg in cmdline:
        if arg.startswith("-Fo"):
            target = arg[3:]
            break

    if target is None:
        # sys.stderr.write works identically on Python 2 and 3, unlike the
        # py2-only "print >>" statement this replaces.
        sys.stderr.write("No target set\n")
        return 1

    # Assume the source file is the last argument
    source = cmdline[-1]
    assert not source.startswith('-')

    # The deps target lives here
    depstarget = os.path.basename(target) + ".pp"

    cmdline += ['-showIncludes']

    mk = Makefile()
    rule = mk.create_rule([target])
    rule.add_dependencies([normcase(source)])

    def on_line(line):
        # cl -showIncludes prefixes every header with "Note: including file:"
        # and an indentation corresponding to the depth (which we don't need)
        if line.startswith(CL_INCLUDES_PREFIX):
            dep = line[len(CL_INCLUDES_PREFIX):].strip()
            # We can't handle paths with spaces properly in mddepend.pl, but
            # we can assume that anything in a path with spaces is a system
            # header and throw it away.
            dep = normcase(dep)
            if ' ' not in dep:
                rule.add_dependencies([dep])
        else:
            # Make sure we preserve the relevant output from cl. mozprocess
            # swallows the newline delimiter, so we need to re-add it.
            sys.stdout.write(line)
            sys.stdout.write('\n')

    # We need to ignore children because MSVC can fire up a background process
    # during compilation. This process is cleaned up on its own. If we kill it,
    # we can run into weird compilation issues.
    p = ProcessHandlerMixin(cmdline, processOutputLine=[on_line],
        ignore_children=True)
    p.run()
    p.processOutput()
    ret = p.wait()

    if ret != 0 or not target:
        # p.wait() returns a long. Somehow sys.exit(long(0)) is like
        # sys.exit(1). Don't ask why.
        return int(ret)

    depsdir = os.path.normpath(os.path.join(os.curdir, ".deps"))
    depstarget = os.path.join(depsdir, depstarget)
    if not os.path.isdir(depsdir):
        try:
            os.makedirs(depsdir)
        except OSError:
            pass # This suppresses the error we get when the dir exists, at the
                 # cost of masking failure to create the directory.  We'll just
                 # die on the next line though, so it's not that much of a loss.

    with open(depstarget, "w") as f:
        mk.dump(f)

    return 0
Exemplo n.º 31
0
    def consume_finished(self):
        """Emit the 'faster' build backend.

        Writes one install-manifest file per manifest base under
        <topobjdir>/faster, plus a faster/Makefile recording configure
        variables, inferred dependencies and the chrome-manifest content,
        all driven by config/faster/rules.mk.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
        if not self._has_xpidl:
            mk.add_statement('NO_XPIDL = 1')

        # Add a few necessary variables inherited from configure
        for var in (
            'PYTHON',
            'ACDEFINES',
            'MOZ_BUILD_APP',
            'MOZ_WIDGET_TOOLKIT',
        ):
            value = self.environment.substs.get(var)
            if value is not None:
                mk.add_statement('%s = %s' % (var, value))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            # Route each chrome manifest's content into the install manifest
            # whose base directory contains the target path.
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                ''.join('%s\n' % e for e in sorted(entries)),
                mozpath.relpath(target, install_target))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we infered:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        # For artifact builds only, write a single unified manifest for consumption by |mach watch|.
        if self.environment.is_artifact_build:
            unified_manifest = InstallManifest()
            for base, install_manifest in self._install_manifests.iteritems():
                # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
                assert base.startswith('dist/bin')
                base = base[len('dist/bin'):]
                if base and base[0] == '/':
                    base = base[1:]
                unified_manifest.add_entries_from(install_manifest, base=base)

            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'unified_install_dist_bin')) as fh:
                unified_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 32
0
        if options.verbose:
            print_command(sys.stderr, args)
        try:
            proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
        except Exception, e:
            print >>sys.stderr, 'error: Launching', args, ':', e
            raise e
        (stdout, stderr) = proc.communicate()
        if proc.returncode and not options.verbose:
            print_command(sys.stderr, args)
        sys.stderr.write(stdout)
        sys.stderr.flush()
        if proc.returncode:
            exit(proc.returncode)
    if not options.depend:
        return
    ensureParentDir(options.depend)
    mk = Makefile()
    deps = [dep for dep in deps if os.path.isfile(dep) and dep != options.target
            and os.path.abspath(dep) != os.path.abspath(options.depend)]
    no_dynamic_lib = [dep for dep in deps if not isDynamicLib(dep)]
    mk.create_rule([options.target]).add_dependencies(no_dynamic_lib)
    if len(deps) != len(no_dynamic_lib):
        mk.create_rule(['%s_order_only' % options.target]).add_dependencies(dep for dep in deps if isDynamicLib(dep))

    with open(options.depend, 'w') as depfile:
        mk.dump(depfile, removal_guard=True)

if __name__ == '__main__':
    main()
Exemplo n.º 33
0
def process(input_dirs, inc_paths, bindings_conf, header_dir, xpcrs_dir,
            xpt_dir, deps_dir, module, idl_files):
    """Compile a module's XPIDL files into C++ headers, Rust bindings and a
    single merged .xpt typelib.

    For each path in |idl_files|: parse it, resolve it against |inc_paths|
    and the DOMInterfaces config from |bindings_conf|, then write
    <header_dir>/<stem>.h, <xpcrs_dir>/rt/<stem>.rs and
    <xpcrs_dir>/bt/<stem>.rs.  The linked typelib is written
    unconditionally to <xpt_dir>/<module>.xpt; if |deps_dir| is set, a
    Makefile-format dependency file covering the processor's own Python
    modules and every .idl dependency is written to <deps_dir>/<module>.pp.

    NOTE(review): |input_dirs| appears unused in this body — confirm
    against callers before removing.
    """
    p = IDLParser()

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    # bindings_conf is an executable Python config defining DOMInterfaces.
    glbl = {}
    exec(open(bindings_conf, encoding='utf-8').read(), glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(
        six.ensure_text(s) for s in iter_modules_in_path(topsrcdir))

    for path in idl_files:
        basename = os.path.basename(path)
        stem, _ = os.path.splitext(basename)
        idl_data = open(path, encoding='utf-8').read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve(inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(six.ensure_text(s) for s in idl.deps)

        # The print_* functions don't actually do anything with the
        # passed-in path other than writing it into the file to let people
        # know where the original source was.  This script receives
        # absolute paths, which are not so great to embed in header files
        # (they mess with deterministic generation of files on different
        # machines, Searchfox logic, shared compilation caches, etc.), so
        # we pass in fake paths that are the same across compilations, but
        # should still enable people to figure out where to go.
        relpath = mozpath.relpath(path, topsrcdir)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path, relpath)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, relpath)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, relpath)

    # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
    # number of different changes in the code, which may not cause the .xpt
    # files to be changed in any way. This means that make will re-run us every
    # time a build is run whether or not anything changed. To fix this we
    # unconditionally write out the file.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with open(xpt_path, 'w', encoding='utf-8', newline='\n') as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([six.ensure_text(xpt_path)])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
Exemplo n.º 34
0
    def consume_finished(self):
        """Emit the 'faster' build backend (l10n-aware variant).

        Writes one install-manifest file per manifest base under
        <topobjdir>/faster, plus a faster/Makefile recording configure
        variables, inferred build and l10n-merge dependencies,
        generated-file statements, and the chrome-manifest content, all
        driven by config/faster/rules.mk.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
        mk.add_statement('MDDEPDIR = .deps')
        mk.add_statement('TOUCH ?= touch')
        mk.add_statement('include $(TOPSRCDIR)/config/makefiles/functions.mk')
        mk.add_statement('include $(TOPSRCDIR)/config/AB_rCD.mk')
        mk.add_statement('AB_CD = en-US')
        if not self._has_xpidl:
            mk.add_statement('NO_XPIDL = 1')

        # Add a few necessary variables inherited from configure
        for var in (
                'PYTHON3',
                'ACDEFINES',
                'MOZ_BUILD_APP',
                'MOZ_WIDGET_TOOLKIT',
        ):
            value = self.environment.substs.get(var)
            if value is not None:
                mk.add_statement('%s = %s' % (var, value))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in six.iteritems(self._manifest_entries):
            manifest_targets.append(target)
            # Route each chrome manifest's content into the install manifest
            # whose base directory contains the target path.
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                ''.join('%s\n' % e for e in sorted(entries)),
                mozpath.relpath(target, install_target))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s' %
                         ' '.join(sorted(self._install_manifests.keys())))

        # Add dependencies we inferred:
        for target, deps in sorted(six.iteritems(self._dependencies)):
            mk.create_rule([target]).add_dependencies('$(TOPOBJDIR)/%s' % d
                                                      for d in sorted(deps))

        # This is not great, but it's better to have some dependencies on these Python files.
        python_deps = [
            '$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py',
            '$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py',
            '$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py',
        ]
        # Add l10n dependencies we inferred:
        for target, deps in sorted(six.iteritems(self._l10n_dependencies)):
            mk.create_rule([target]).add_dependencies(
                '%s' % d[0] for d in sorted(deps, key=itemgetter(0)))
            for (merge, ref_file, l10n_file) in deps:
                rule = mk.create_rule([
                    merge
                ]).add_dependencies([ref_file, l10n_file] + python_deps)
                rule.add_commands([
                    '$(PYTHON3) -m mozbuild.action.l10n_merge '
                    '--output {} --ref-file {} --l10n-file {}'.format(
                        merge, ref_file, l10n_file)
                ])
                # Add a dummy rule for the l10n file since it might not exist.
                mk.create_rule([l10n_file])

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in six.iteritems(self._install_manifests):
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        # For artifact builds only, write a single unified manifest
        # for consumption by |mach watch|.
        if self.environment.is_artifact_build:
            unified_manifest = InstallManifest()
            for base, install_manifest in six.iteritems(
                    self._install_manifests):
                # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
                assert base.startswith('dist/bin')
                base = base[len('dist/bin'):]
                if base and base[0] == '/':
                    base = base[1:]
                unified_manifest.add_entries_from(install_manifest, base=base)

            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'unified_install_dist_bin')) as fh:
                unified_manifest.write(fileobj=fh)

        for obj in self._generated_files:
            for stmt in self._format_statements_for_generated_file(
                    obj, 'default'):
                mk.add_statement(stmt)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 35
0
    def consume_finished(self):
        """Emit the 'faster' build backend (backend-tracking variant).

        Like the other faster-backend variants, but also records this
        backend's own input files as dependencies of the backend output
        list file, so the backend regenerates when its inputs change.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
        mk.add_statement('BACKEND = %s' % self._backend_output_list_file)
        if not self._has_xpidl:
            mk.add_statement('NO_XPIDL = 1')

        # Add a few necessary variables inherited from configure
        for var in (
            'PYTHON',
            'ACDEFINES',
            'MOZ_BUILD_APP',
            'MOZ_WIDGET_TOOLKIT',
        ):
            mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            # Route each chrome manifest's content into the install manifest
            # whose base directory contains the target path.
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                ''.join('%s\n' % e for e in sorted(entries)),
                mozpath.relpath(target, install_target))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we infered:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        # Add backend dependencies:
        mk.create_rule([self._backend_output_list_file]).add_dependencies(
            self.backend_input_files)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 36
0
    def consume_finished(self):
        """Emit the 'faster' build backend (jar-manifest variant).

        Writes one install-manifest file per manifest base under
        <topobjdir>/faster, plus a faster/Makefile recording configure
        variables, jar.mn processing targets, and chrome-manifest targets,
        driven by config/faster/rules.mk.  Per-target parameters are
        encoded as 'name = value' pseudo-dependencies consumed by rules.mk.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)

        # Add a few necessary variables inherited from configure
        for var in (
            'PYTHON',
            'ACDEFINES',
            'MOZ_CHROME_FILE_FORMAT',
        ):
            mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

        # Add all necessary information for jar manifest processing
        jar_mn_targets = []

        for path, (objdir, install_target, defines) in \
                self._jar_manifests.iteritems():
            rel_manifest = mozpath.relpath(path, self.environment.topsrcdir)
            # Derive a unique flat target name from the manifest's srcdir path.
            target = rel_manifest.replace('/', '-')
            assert target not in jar_mn_targets
            jar_mn_targets.append(target)
            target = 'jar-%s' % target
            mk.create_rule([target]).add_dependencies([path])
            # Only emit non-default objdir/install_target/defines settings.
            if objdir != mozpath.join(self.environment.topobjdir,
                                      mozpath.dirname(rel_manifest)):
                mk.create_rule([target]).add_dependencies(
                    ['objdir = %s' % objdir])
            if install_target != 'dist/bin':
                mk.create_rule([target]).add_dependencies(
                    ['install_target = %s' % install_target])
            if defines:
                mk.create_rule([target]).add_dependencies(
                    ['defines = %s' % ' '.join(defines)])

        mk.add_statement('JAR_MN_TARGETS = %s' % ' '.join(jar_mn_targets))

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            target = '$(TOPOBJDIR)/%s' % target
            mk.create_rule([target]).add_dependencies(
                ['content = %s' % ' '.join('"%s"' % e for e in entries)])

        mk.add_statement('MANIFEST_TARGETS = %s' % ' '.join(manifest_targets))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 37
0
    def consume_finished(self):
        """Emit the 'faster' build backend (chrome-manifest variant).

        Writes one install-manifest file per manifest base under
        <topobjdir>/faster, plus a faster/Makefile recording configure
        variables, chrome-manifest targets (with their content encoded as
        'content = ...' pseudo-dependencies for rules.mk) and inferred
        dependencies, driven by config/faster/rules.mk.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)

        # Add a few necessary variables inherited from configure
        for var in (
            'PYTHON',
            'ACDEFINES',
            'MOZ_BUILD_APP',
            'MOZ_WIDGET_TOOLKIT',
        ):
            mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            target = '$(TOPOBJDIR)/%s' % target
            mk.create_rule([target]).add_dependencies(
                ['content = %s' % ' '.join('"%s"' % e for e in entries)])

        mk.add_statement('MANIFEST_TARGETS = %s' % ' '.join(manifest_targets))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we infered:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 38
0
    def consume_finished(self):
        """Emit the 'faster' build backend (chrome-manifest variant).

        Writes one install-manifest file per manifest base under
        <topobjdir>/faster, plus a faster/Makefile recording configure
        variables, chrome-manifest targets (with their content encoded as
        'content = ...' pseudo-dependencies for rules.mk) and inferred
        dependencies, driven by config/faster/rules.mk.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)

        # Add a few necessary variables inherited from configure
        for var in (
            'PYTHON',
            'ACDEFINES',
            'MOZ_BUILD_APP',
            'MOZ_WIDGET_TOOLKIT',
        ):
            mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            target = '$(TOPOBJDIR)/%s' % target
            mk.create_rule([target]).add_dependencies(
                ['content = %s' % ' '.join('"%s"' % e for e in entries)])

        mk.add_statement('MANIFEST_TARGETS = %s' % ' '.join(manifest_targets))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s'
                         % ' '.join(self._install_manifests.keys()))

        # Add dependencies we infered:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies(
                '$(TOPOBJDIR)/%s' % d for d in deps)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 39
0
def InvokeClWithDependencyGeneration(cmdline):
    """Run the MSVC command |cmdline| and write a .deps/<target>.pp Makefile
    fragment listing every header the compilation included.

    The object file is taken from the -Fo argument; the source file is
    assumed to be the last argument.  -showIncludes output is consumed to
    build the dependency list and is only echoed back when the caller had
    -showIncludes on the original command line.  Returns the compiler's
    exit code (0 on success, 1 if no -Fo target was given).
    """
    # Figure out what the target is.  Use None (not "") so a missing -Fo
    # argument actually trips the check below: the previous code set "" but
    # compared against None, so the error branch could never fire.
    target = None
    for arg in cmdline:
        if arg.startswith("-Fo"):
            target = arg[3:]
            break

    if target is None:
        # sys.stderr.write works identically on Python 2 and 3, unlike the
        # py2-only "print >>" statement this replaces.
        sys.stderr.write("No target set\n")
        return 1

    # Assume the source file is the last argument
    source = cmdline[-1]
    assert not source.startswith('-')

    # The deps target lives here
    depstarget = os.path.basename(target) + ".pp"

    # Remember whether the caller wanted the include listing echoed before
    # we force it on for our own dependency scraping.
    showincludes = '-showIncludes' in cmdline
    cmdline += ['-showIncludes']

    mk = Makefile()
    rule = mk.create_rule([target])
    rule.add_dependencies([normcase(source)])

    def on_line(line):
        # cl -showIncludes prefixes every header with "Note: including file:"
        # and an indentation corresponding to the depth (which we don't need)
        if line.startswith(CL_INCLUDES_PREFIX):
            dep = line[len(CL_INCLUDES_PREFIX):].strip()
            # We can't handle paths with spaces properly in mddepend.pl, but
            # we can assume that anything in a path with spaces is a system
            # header and throw it away.
            dep = normcase(dep)
            if ' ' not in dep:
                rule.add_dependencies([dep])
            # Hide the line by returning early
            if not showincludes:
                return
        # Make sure we preserve the relevant output from cl. mozprocess
        # swallows the newline delimiter, so we need to re-add it.
        sys.stdout.write(line)
        sys.stdout.write('\n')

    # We need to ignore children because MSVC can fire up a background process
    # during compilation. This process is cleaned up on its own. If we kill it,
    # we can run into weird compilation issues.
    p = ProcessHandlerMixin(cmdline,
                            processOutputLine=[on_line],
                            ignore_children=True)
    p.run()
    p.processOutput()
    ret = p.wait()

    if ret != 0 or not target:
        # p.wait() returns a long. Somehow sys.exit(long(0)) is like
        # sys.exit(1). Don't ask why.
        return int(ret)

    depsdir = os.path.normpath(os.path.join(os.curdir, ".deps"))
    depstarget = os.path.join(depsdir, depstarget)
    if not os.path.isdir(depsdir):
        try:
            os.makedirs(depsdir)
        except OSError:
            pass  # This suppresses the error we get when the dir exists, at the
            # cost of masking failure to create the directory.  We'll just
            # die on the next line though, so it's not that much of a loss.

    with open(depstarget, "w") as f:
        mk.dump(f)

    return 0
Exemplo n.º 40
0
    def generate_build_files(self):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs done are roughly as
        follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If an non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes in those could globally impact every output
           file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.

        Returns a BuildResult summarizing the created/updated/unchanged
        outputs and the inputs that were processed.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        # Persist the hashes so the next invocation can detect global changes.
        self._state['global_depends'] = global_hashes

        # Generate bindings from .webidl files.
        for filename in sorted(changed_inputs):
            basename = mozpath.basename(filename)
            result.inputs.add(filename)
            written, deps = self._generate_build_files_for_webidl(filename)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            # Record per-input outputs, deps and content hash for
            # incremental decisions on the next run.
            self._state['webidls'][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self._example_interfaces:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result
Exemplo n.º 41
0
def main(argv):
    """Generate a file by invoking a method of a user-supplied Python script.

    The named method is called with a FileAvoidWrite for the output file plus
    any additional command-line arguments.  A returned set, 0, False or None
    indicates success; a set's elements are extra make dependencies.  On
    success a make dependency file is written covering those extras plus the
    Python modules the script imported.

    Returns None/0 on success, non-zero on failure (usable as an exit code).
    """
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('additional_arguments', metavar='arg',
                        nargs=argparse.REMAINDER,
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # The following values indicate a statement of success:
            #  - a set() (see below)
            #  - 0
            #  - False
            #  - None
            #
            # Everything else is an error (so scripts can conveniently |return
            # 1| or similar). If a set is returned, the elements of the set
            # indicate additional dependencies that will be listed in the deps
            # file. Python module imports are automatically included as
            # dependencies.
            if isinstance(ret, set):
                deps = ret
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
            else:
                deps = set()

            # Only write out the dependencies if the script was successful
            if not ret:
                # Add dependencies on any python modules that were imported by
                # the script.
                deps |= set(iter_modules_in_path(buildconfig.topsrcdir,
                                                 buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(deps)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except OSError:
            # Narrowed from a bare |except|, which would also have swallowed
            # KeyboardInterrupt and SystemExit.  os.utime raises OSError.
            print('Error processing file "{0}"'.format(args.output_file),
                  file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
Exemplo n.º 42
0
def main(argv):
    """Generate a file by invoking a method of a user-supplied Python script.

    The named method is called with a FileAvoidWrite for the output file plus
    any additional command-line arguments.  A returned set, 0, False or None
    indicates success; a set's elements are extra make dependencies.  On
    success a make dependency file is written covering those extras plus the
    Python modules the script imported.

    Returns None/0 on success, non-zero on failure (usable as an exit code).
    """
    parser = argparse.ArgumentParser("Generate a file from a Python script", add_help=False)
    parser.add_argument("python_script", metavar="python-script", type=str, help="The Python script to run")
    parser.add_argument("method_name", metavar="method-name", type=str, help="The method of the script to invoke")
    parser.add_argument("output_file", metavar="output-file", type=str, help="The file to generate")
    parser.add_argument(
        "dep_file", metavar="dep-file", type=str, help="File to write any additional make dependencies to"
    )
    parser.add_argument(
        "additional_arguments",
        metavar="arg",
        nargs=argparse.REMAINDER,
        help="Additional arguments to the script's main() method",
    )

    args = parser.parse_args(argv)

    script = args.python_script
    # Permit the script to import modules from the same directory in which it
    # resides.  The justification for doing this is that if we were invoking
    # the script as:
    #
    #    python script arg1...
    #
    # then importing modules from the script's directory would come for free.
    # Since we're invoking the script in a roundabout way, we provide this
    # bit of convenience.
    sys.path.append(os.path.dirname(script))
    with open(script, "r") as fh:
        module = imp.load_module("script", fh, script, (".py", "r", imp.PY_SOURCE))
    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method), file=sys.stderr)
        return 1

    ret = 1
    try:
        with FileAvoidWrite(args.output_file) as output:
            ret = module.__dict__[method](output, *args.additional_arguments)
            # A returned set is a statement of success whose elements are
            # additional dependencies for the deps file; 0/False/None also
            # indicate success.  Everything else is an error (so scripts can
            # conveniently |return 1| or similar).
            if isinstance(ret, set):
                # Fixed: the old `isinstance(ret, set) and ret` test made an
                # *empty* set fall through as a failure, skipped the deps file
                # entirely, and returned the set object itself.
                deps = ret
                # The script succeeded, so reset |ret| to indicate that.
                ret = None
            else:
                deps = set()

            # Only write out the dependencies if the script was successful.
            if not ret:
                # Python modules imported by the script are dependencies too.
                deps |= set(iter_modules_in_path(buildconfig.topsrcdir, buildconfig.topobjdir))
                mk = Makefile()
                mk.create_rule([args.output_file]).add_dependencies(deps)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)
        # Even when our file's contents haven't changed, we want to update
        # the file's mtime so make knows this target isn't still older than
        # whatever prerequisite caused it to be built this time around.
        try:
            os.utime(args.output_file, None)
        except OSError:
            # Narrowed from a bare |except|, which would also have swallowed
            # KeyboardInterrupt and SystemExit.  os.utime raises OSError.
            print('Error processing file "{0}"'.format(args.output_file), file=sys.stderr)
            traceback.print_exc()
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return ret
Exemplo n.º 43
0
def config_status(config):
    """Write config.status (plus a configure.d depfile) from configure data.

    |config| is a dict of raw configure results.  Booleans are flattened to
    the autoconf-style '1'/'' strings the build backends expect, the data is
    split into substs/defines/etc., and an executable config.status script is
    emitted that re-runs the build backend when invoked.  For non-standalone-js
    builds this then runs config_status directly and returns its exit code;
    otherwise returns 0.
    """
    # Sanitize config data to feed config.status
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved, that it's easier to keep config.status
    # untouched for now.
    def sanitized_bools(v):
        # Map True/False to the '1'/'' convention; pass everything else through.
        if v is True:
            return '1'
        if v is False:
            return ''
        return v

    sanitized_config = {}
    # Everything except the specially-handled keys below becomes a subst.
    sanitized_config['substs'] = {
        k: sanitized_bools(v) for k, v in config.iteritems()
        if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
                     'ALL_CONFIGURE_PATHS')
    }
    sanitized_config['defines'] = {
        k: sanitized_bools(v) for k, v in config['DEFINES'].iteritems()
    }
    sanitized_config['non_global_defines'] = config['non_global_defines']
    sanitized_config['topsrcdir'] = config['TOPSRCDIR']
    sanitized_config['topobjdir'] = config['TOPOBJDIR']
    sanitized_config['mozconfig'] = config.get('MOZCONFIG')

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    print("Creating config.status", file=sys.stderr)
    # 'mbcs' is the Windows ANSI code page; everything else gets UTF-8.
    encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8'
    with codecs.open('config.status', 'w', encoding) as fh:
        fh.write(textwrap.dedent('''\
            #!%(python)s
            # coding=%(encoding)s
            from __future__ import unicode_literals
            from mozbuild.util import encode
            encoding = '%(encoding)s'
        ''') % {'python': config['PYTHON'], 'encoding': encoding})
        # A lot of the build backend code is currently expecting byte
        # strings and breaks in subtle ways with unicode strings. (bug 1296508)
        for k, v in sanitized_config.iteritems():
            fh.write('%s = encode(%s, encoding)\n' % (k, indented_repr(v)))
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'non_global_defines', 'substs', 'mozconfig']")

        # Except for standalone-js builds, make config.status executable as a
        # script that re-runs the build backend with the stored values.
        if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
            fh.write(textwrap.dedent('''
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            '''))

    partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
    partial_config.write_vars(sanitized_config)

    # Write out a depfile so Make knows to re-run configure when relevant Python
    # changes.
    mk = Makefile()
    rule = mk.create_rule()
    rule.add_targets(["%s/config.status" % config['TOPOBJDIR']])
    rule.add_dependencies(itertools.chain(config['ALL_CONFIGURE_PATHS'],
                                          iter_modules_in_path(config['TOPOBJDIR'],
                                                               config['TOPSRCDIR'])))
    with open('configure.d', 'w') as fh:
        mk.dump(fh)

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod('config.status', 0o755)
    if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
        os.environ[b'WRITE_MOZINFO'] = b'1'
        from mozbuild.config_status import config_status

        # Some values in sanitized_config also have more complex types, such as
        # EnumString, which using when calling config_status would currently
        # break the build, as well as making it inconsistent with re-running
        # config.status. Fortunately, EnumString derives from unicode, so it's
        # covered by converting unicode strings.

        # A lot of the build backend code is currently expecting byte strings
        # and breaks in subtle ways with unicode strings.
        return config_status(args=[], **encode(sanitized_config, encoding))
    return 0
Exemplo n.º 44
0
    def consume_finished(self):
        """Emit the 'faster' build backend outputs.

        Collects everything accumulated while consuming moz.build objects —
        jar manifests, chrome manifest entries, preprocessed files and install
        manifests — into $(TOPOBJDIR)/faster/Makefile (driven by
        config/faster/rules.mk) plus one install_* manifest file per base.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(["default"])
        mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir)
        mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir)

        # Add a few necessary variables inherited from configure
        for var in ("PYTHON", "ACDEFINES", "MOZ_CHROME_FILE_FORMAT"):
            mk.add_statement("%s = %s" % (var, self.environment.substs[var]))

        # Add all necessary information for jar manifest processing
        jar_mn_targets = []

        for path, (objdir, install_target, defines) in self._jar_manifests.iteritems():
            rel_manifest = mozpath.relpath(path, self.environment.topsrcdir)
            # Derive a unique per-manifest target name from its relative path.
            target = rel_manifest.replace("/", "-")
            assert target not in jar_mn_targets
            jar_mn_targets.append(target)
            target = "jar-%s" % target
            mk.create_rule([target]).add_dependencies([path])
            # Non-default settings ride along as "key = value" pseudo-
            # dependencies — presumably parsed back out by rules.mk; confirm
            # against config/faster/rules.mk.
            if objdir != mozpath.join(self.environment.topobjdir, mozpath.dirname(rel_manifest)):
                mk.create_rule([target]).add_dependencies(["objdir = %s" % objdir])
            if install_target != "dist/bin":
                mk.create_rule([target]).add_dependencies(["install_target = %s" % install_target])
            if defines:
                mk.create_rule([target]).add_dependencies(["defines = %s" % " ".join(defines)])

        mk.add_statement("JAR_MN_TARGETS = %s" % " ".join(jar_mn_targets))

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            target = "$(TOPOBJDIR)/%s" % target
            mk.create_rule([target]).add_dependencies(["content = %s" % " ".join('"%s"' % e for e in entries)])

        mk.add_statement("MANIFEST_TARGETS = %s" % " ".join(manifest_targets))

        # Add information for preprocessed files.
        preprocess_targets = []

        for target, (srcdir, f, defines) in self._preprocess_files.iteritems():
            # This matches what PP_TARGETS do in config/rules.
            if target.endswith(".in"):
                target = target[:-3]
                # PP_TARGETS assumes this is true, but doesn't enforce it.
                assert target not in self._preprocess_files
            preprocess_targets.append(target)
            target = "$(TOPOBJDIR)/%s" % target
            mk.create_rule([target]).add_dependencies([mozpath.join(srcdir, f)])
            if defines:
                mk.create_rule([target]).add_dependencies(["defines = %s" % " ".join(defines)])

        mk.add_statement("PP_TARGETS = %s" % " ".join(preprocess_targets))

        # Add information for install manifests.
        mk.add_statement("INSTALL_MANIFESTS = %s" % " ".join(self._install_manifests.keys()))

        mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")

        # One serialized install manifest per base, e.g. install_dist_bin.
        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                mozpath.join(self.environment.topobjdir, "faster", "install_%s" % base.replace("/", "_"))
            ) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(mozpath.join(self.environment.topobjdir, "faster", "Makefile")) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 45
0
def main(argv):
    """Invoke a method of a user-supplied Python script to generate a file.

    The method receives a FileAvoidWrite for the output file, any extra
    command-line arguments, and — when --locale was given — a |locale|
    keyword.  A returned set, 0, False or None indicates success; a set's
    elements are extra make dependencies.  On success a deps file for
    |dep_target| is written covering those extras, the script's Python
    imports, and any buildconfig values it accessed.  On failure the
    previous output file is left untouched.

    Returns None/0 on success, non-zero on failure (usable as an exit code).
    """
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('--locale', metavar='locale', type=str,
                        help='The locale in use.')
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('dep_file', metavar='dep-file', type=str,
                        help='File to write any additional make dependencies to')
    parser.add_argument('dep_target', metavar='dep-target', type=str,
                        help='Make target to use in the dependencies file')
    parser.add_argument('additional_arguments', metavar='arg',
                        nargs=argparse.REMAINDER,
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    method_kwargs = {}
    if args.locale:
        method_kwargs['locale'] = args.locale

    script = args.python_script
    # Let the script import modules living next to it, exactly as it could
    # if it were run directly via |python script arg1...|.  Since we load it
    # in a roundabout way instead, extend sys.path ourselves.
    sys.path.append(os.path.dirname(script))
    with open(script, 'r') as fh:
        module = imp.load_module('script', fh, script,
                                 ('.py', 'r', imp.PY_SOURCE))

    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(script, method),
              file=sys.stderr)
        return 1

    status = 1
    try:
        with FileAvoidWrite(args.output_file, mode='rb') as output:
            try:
                status = module.__dict__[method](
                    output, *args.additional_arguments, **method_kwargs)
            except Exception:
                # The script blew up: don't clobber the existing output.
                output.avoid_writing_to_file()
                raise

            # Success is signalled by a set() — whose elements are extra
            # dependencies for the deps file — or by 0, False or None.
            # Anything else is an error, so scripts can conveniently
            # |return 1| or similar.
            if isinstance(status, set):
                extra_deps = status
                status = None  # The script succeeded.
            else:
                extra_deps = set()

            if status:
                # Failure: keep the previous output file intact.
                output.avoid_writing_to_file()
            else:
                # Python modules imported by the script are dependencies
                # too, as are any buildconfig values it accessed.
                extra_deps.update(iter_modules_in_path(buildconfig.topsrcdir,
                                                       buildconfig.topobjdir))
                extra_deps.update(buildconfig.get_dependencies())

                mk = Makefile()
                mk.create_rule([args.dep_target]).add_dependencies(extra_deps)
                with FileAvoidWrite(args.dep_file) as dep_file:
                    mk.dump(dep_file)

    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return status
Exemplo n.º 46
0
def config_status(config):
    """Write config.status (plus a configure.d depfile) from configure data.

    |config| is a dict of raw configure results.  Booleans are flattened to
    the autoconf-style '1'/'' strings the build backends expect, the data is
    split into substs/defines/etc., and an executable config.status script is
    emitted that re-runs the build backend when invoked.  For non-standalone-js
    builds this then runs config_status directly and returns its exit code;
    otherwise returns 0.
    """
    # Sanitize config data to feed config.status
    # Ideally, all the backend and frontend code would handle the booleans, but
    # there are so many things involved, that it's easier to keep config.status
    # untouched for now.
    def sanitized_bools(v):
        # Map True/False to the '1'/'' convention; pass everything else through.
        if v is True:
            return '1'
        if v is False:
            return ''
        return v

    sanitized_config = {}
    # Everything except the specially-handled keys below becomes a subst.
    sanitized_config['substs'] = {
        k: sanitized_bools(v)
        for k, v in config.iteritems()
        if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
                     'ALL_CONFIGURE_PATHS')
    }
    sanitized_config['defines'] = {
        k: sanitized_bools(v)
        for k, v in config['DEFINES'].iteritems()
    }
    sanitized_config['non_global_defines'] = config['non_global_defines']
    sanitized_config['topsrcdir'] = config['TOPSRCDIR']
    sanitized_config['topobjdir'] = config['TOPOBJDIR']
    sanitized_config['mozconfig'] = config.get('MOZCONFIG')

    # Create config.status. Eventually, we'll want to just do the work it does
    # here, when we're able to skip configure tests/use cached results/not rely
    # on autoconf.
    print("Creating config.status", file=sys.stderr)
    # 'mbcs' is the Windows ANSI code page; everything else gets UTF-8.
    encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8'
    with codecs.open('config.status', 'w', encoding) as fh:
        fh.write(
            textwrap.dedent('''\
            #!%(python)s
            # coding=%(encoding)s
            from __future__ import unicode_literals
            from mozbuild.util import encode
            encoding = '%(encoding)s'
        ''') % {
                'python': config['PYTHON'],
                'encoding': encoding
            })
        # A lot of the build backend code is currently expecting byte
        # strings and breaks in subtle ways with unicode strings. (bug 1296508)
        for k, v in sanitized_config.iteritems():
            fh.write('%s = encode(%s, encoding)\n' % (k, indented_repr(v)))
        fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
                 "'non_global_defines', 'substs', 'mozconfig']")

        # Except for standalone-js builds, make config.status executable as a
        # script that re-runs the build backend with the stored values.
        if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
            fh.write(
                textwrap.dedent('''
                if __name__ == '__main__':
                    from mozbuild.util import patch_main
                    patch_main()
                    from mozbuild.config_status import config_status
                    args = dict([(name, globals()[name]) for name in __all__])
                    config_status(**args)
            '''))

    partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
    partial_config.write_vars(sanitized_config)

    # Write out a depfile so Make knows to re-run configure when relevant Python
    # changes.
    mk = Makefile()
    rule = mk.create_rule()
    rule.add_targets(["%s/config.status" % config['TOPOBJDIR']])
    rule.add_dependencies(
        itertools.chain(
            config['ALL_CONFIGURE_PATHS'],
            iter_modules_in_path(config['TOPOBJDIR'], config['TOPSRCDIR'])))
    with open('configure.d', 'w') as fh:
        mk.dump(fh)

    # Other things than us are going to run this file, so we need to give it
    # executable permissions.
    os.chmod('config.status', 0o755)
    if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
        from mozbuild.config_status import config_status

        # Some values in sanitized_config also have more complex types, such as
        # EnumString, which using when calling config_status would currently
        # break the build, as well as making it inconsistent with re-running
        # config.status. Fortunately, EnumString derives from unicode, so it's
        # covered by converting unicode strings.

        # A lot of the build backend code is currently expecting byte strings
        # and breaks in subtle ways with unicode strings.
        return config_status(args=[], **encode(sanitized_config, encoding))
    return 0
Exemplo n.º 47
0
    def consume_finished(self):
        """Emit the 'faster' build backend outputs.

        Writes $(TOPOBJDIR)/faster/Makefile (driven by
        config/faster/rules.mk), one serialized install_* manifest per base,
        and — for artifact builds — a unified manifest for |mach watch|.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
        # Tell rules.mk to skip XPIDL processing when no XPIDL was seen.
        if not self._has_xpidl:
            mk.add_statement('NO_XPIDL = 1')

        # Add a few necessary variables inherited from configure
        for var in (
                'PYTHON',
                'ACDEFINES',
                'MOZ_BUILD_APP',
                'MOZ_WIDGET_TOOLKIT',
        ):
            value = self.environment.substs.get(var)
            if value is not None:
                mk.add_statement('%s = %s' % (var, value))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.iteritems():
            manifest_targets.append(target)
            # Fold each chrome manifest's entries into the install manifest
            # whose base directory contains the target.
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                ''.join('%s\n' % e for e in sorted(entries)),
                mozpath.relpath(target, install_target))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s' %
                         ' '.join(self._install_manifests.keys()))

        # Add dependencies we infered:
        for target, deps in self._dependencies.iteritems():
            mk.create_rule([target]).add_dependencies('$(TOPOBJDIR)/%s' % d
                                                      for d in deps)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        # One serialized install manifest per base, e.g. install_dist_bin.
        for base, install_manifest in self._install_manifests.iteritems():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        # For artifact builds only, write a single unified manifest for consumption by |mach watch|.
        if self.environment.is_artifact_build:
            unified_manifest = InstallManifest()
            for base, install_manifest in self._install_manifests.iteritems():
                # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
                assert base.startswith('dist/bin')
                base = base[len('dist/bin'):]
                if base and base[0] == '/':
                    base = base[1:]
                unified_manifest.add_entries_from(install_manifest, base=base)

            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'unified_install_dist_bin')) as fh:
                unified_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 48
0
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
        except Exception, e:
            print >> sys.stderr, 'error: Launching', args, ':', e
            raise e
        (stdout, stderr) = proc.communicate()
        if proc.returncode and not options.verbose:
            print_command(sys.stderr, args)
        sys.stderr.write(stdout)
        sys.stderr.flush()
        if proc.returncode:
            exit(proc.returncode)
    if not options.depend:
        return
    ensureParentDir(options.depend)
    mk = Makefile()
    deps = [
        dep for dep in deps if os.path.isfile(dep) and dep != options.target
    ]
    no_dynamic_lib = [dep for dep in deps if not isDynamicLib(dep)]
    mk.create_rule([options.target]).add_dependencies(no_dynamic_lib)
    if len(deps) != len(no_dynamic_lib):
        mk.create_rule(['%s_order_only' % options.target
                        ]).add_dependencies(dep for dep in deps
                                            if isDynamicLib(dep))

    with open(options.depend, 'w') as depfile:
        mk.dump(depfile, removal_guard=True)


if __name__ == '__main__':
Exemplo n.º 49
0
    def consume_finished(self):
        """Emit the 'faster' build backend outputs.

        Writes $(TOPOBJDIR)/faster/Makefile (driven by
        config/faster/rules.mk), one serialized install_* manifest per base,
        a unified manifest for |mach watch|, plus rules for inferred
        dependencies, l10n merges and generated files.
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(["default"])
        mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir)
        mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir)
        mk.add_statement("MDDEPDIR = .deps")
        mk.add_statement("TOUCH ?= touch")
        mk.add_statement("include $(TOPSRCDIR)/config/makefiles/functions.mk")
        mk.add_statement("include $(TOPSRCDIR)/config/AB_rCD.mk")
        mk.add_statement("AB_CD = en-US")
        # Tell rules.mk to skip XPIDL processing when no XPIDL was seen.
        if not self._has_xpidl:
            mk.add_statement("NO_XPIDL = 1")

        # Add a few necessary variables inherited from configure
        for var in (
                "PYTHON3",
                "ACDEFINES",
                "MOZ_BUILD_APP",
                "MOZ_WIDGET_TOOLKIT",
        ):
            value = self.environment.substs.get(var)
            if value is not None:
                mk.add_statement("%s = %s" % (var, value))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in six.iteritems(self._manifest_entries):
            manifest_targets.append(target)
            # Fold each chrome manifest's entries into the install manifest
            # whose base directory contains the target.
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                "".join("%s\n" % e for e in sorted(entries)),
                mozpath.relpath(target, install_target),
            )

        # Add information for install manifests.
        mk.add_statement("INSTALL_MANIFESTS = %s" %
                         " ".join(sorted(self._install_manifests.keys())))

        # Add dependencies we inferred:
        for target, deps in sorted(six.iteritems(self._dependencies)):
            mk.create_rule([target]).add_dependencies("$(TOPOBJDIR)/%s" % d
                                                      for d in sorted(deps))

        # This is not great, but it's better to have some dependencies on these Python files.
        python_deps = [
            "$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py",
            "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py",
            "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py",
        ]
        # Add l10n dependencies we inferred:
        for target, deps in sorted(six.iteritems(self._l10n_dependencies)):
            mk.create_rule([target]).add_dependencies(
                "%s" % d[0] for d in sorted(deps, key=itemgetter(0)))
            for (merge, ref_file, l10n_file) in deps:
                # Each merged file is rebuilt via the l10n_merge action when
                # its reference or localized source changes.
                rule = mk.create_rule([
                    merge
                ]).add_dependencies([ref_file, l10n_file] + python_deps)
                rule.add_commands([
                    "$(PYTHON3) -m mozbuild.action.l10n_merge "
                    "--output {} --ref-file {} --l10n-file {}".format(
                        merge, ref_file, l10n_file)
                ])
                # Add a dummy rule for the l10n file since it might not exist.
                mk.create_rule([l10n_file])

        mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")

        # One serialized install manifest per base, e.g. install_dist_bin.
        for base, install_manifest in six.iteritems(self._install_manifests):
            with self._write_file(
                    mozpath.join(
                        self.environment.topobjdir,
                        "faster",
                        "install_%s" % base.replace("/", "_"),
                    )) as fh:
                install_manifest.write(fileobj=fh)

        # Write a single unified manifest for consumption by |mach watch|.
        # Since this doesn't start 'install_', it's not processed by the build.
        unified_manifest = InstallManifest()
        for base, install_manifest in six.iteritems(self._install_manifests):
            # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
            assert base.startswith("dist/bin")
            base = base[len("dist/bin"):]
            if base and base[0] == "/":
                base = base[1:]
            unified_manifest.add_entries_from(install_manifest, base=base)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, "faster",
                             "unified_install_dist_bin")) as fh:
            unified_manifest.write(fileobj=fh)

        for obj in self._generated_files:
            for stmt in self._format_statements_for_generated_file(
                    obj, "default"):
                mk.add_statement(stmt)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, "faster",
                             "Makefile")) as fh:
            mk.dump(fh, removal_guard=False)
Exemplo n.º 50
0
    def generate_build_files(self):
        """Generate files required for the build.

        Produces every .h/.cpp file derived from the input .webidl files.
        Producing the .webidl files themselves is handled by separate build
        actions; this routine assumes they already exist and are current.

        Called during the build to materialize missing or stale outputs. It
        may be skipped entirely if the dependency information written by a
        previous invocation says everything is up to date.

        Full regeneration on every call would be expensive, so only a minimal
        set of outputs is rewritten, roughly as follows:

        1. A changed .webidl triggers a reparse of all .webidl files and a
           refresh of the global derived files, but only the .h/.cpp outputs
           affected by the changed inputs are regenerated.
        2. A changed non-.webidl dependency (Python sources, config file) can
           affect any output, so everything is regenerated.
        3. A missing output file is regenerated regardless.
        """
        # The build system only invokes us when something changed, and any
        # change requires reparsing the WebIDL (despite #1 above).
        self._parse_webidl()

        build_result = BuildResult()

        # The global derived files are cheap, so refresh them on every
        # parse rather than tracking whether they are stale.
        new, refreshed, same = self._write_global_derived()
        build_result.created |= new
        build_result.updated |= refreshed
        build_result.unchanged |= same

        # A change to any extra (non-.webidl) dependency invalidates the
        # world; otherwise only the inputs detected as changed are stale.
        globals_dirty, dependency_hashes = self._global_dependencies_changed()
        if globals_dirty:
            # Copy, since the set may be modified downstream.
            stale_inputs = set(self._input_paths)
        else:
            stale_inputs = self._compute_changed_inputs()

        self._state['global_depends'] = dependency_hashes

        # Regenerate bindings for each stale .webidl file.
        for input_path in sorted(stale_inputs):
            leaf = mozpath.basename(input_path)
            build_result.inputs.add(input_path)
            outcome, webidl_deps = self._generate_build_files_for_webidl(
                input_path)
            new, refreshed, same = outcome
            build_result.created |= new
            build_result.updated |= refreshed
            build_result.unchanged |= same

            self._state['webidls'][leaf] = dict(
                filename=input_path,
                outputs=new | refreshed | same,
                inputs=set(webidl_deps),
                sha1=self._input_hashes[input_path],
            )

        # Special-case interfaces required by the test suite.
        for interface in self._example_interfaces:
            new, refreshed, same = self.generate_example_files(interface)
            build_result.created |= new
            build_result.updated |= refreshed
            build_result.unchanged |= same

        # Emit a make dependency file, when one was requested.
        if self._make_deps_path:
            deps_mk = Makefile()
            codegen_rule = deps_mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(dependency_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as outfile:
                deps_mk.dump(outfile)

        self._save_state()

        return build_result
Exemplo n.º 51
0
    def consume_finished(self):
        """Emit the faster-make backend outputs.

        Writes the top-level faster Makefile plus one install manifest file
        per install-manifest base directory under $objdir/faster/.

        Fix: replace Python-2-only ``dict.iteritems()`` with ``items()``,
        which behaves identically on Python 2 and also works on Python 3
        (the newer variant of this backend already uses the portable
        spelling).
        """
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(['default'])
        mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
        mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
        mk.add_statement('BACKEND = %s' % self._backend_output_list_file)
        if not self._has_xpidl:
            mk.add_statement('NO_XPIDL = 1')

        # Add a few necessary variables inherited from configure
        for var in (
                'PYTHON',
                'ACDEFINES',
                'MOZ_BUILD_APP',
                'MOZ_WIDGET_TOOLKIT',
        ):
            mk.add_statement('%s = %s' % (var, self.environment.substs[var]))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        manifest_targets = []

        for target, entries in self._manifest_entries.items():
            manifest_targets.append(target)
            # Route each chrome manifest into the install manifest whose base
            # directory contains it.
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                ''.join('%s\n' % e for e in sorted(entries)),
                mozpath.relpath(target, install_target))

        # Add information for install manifests.
        mk.add_statement('INSTALL_MANIFESTS = %s' %
                         ' '.join(self._install_manifests.keys()))

        # Add dependencies we infered:
        for target, deps in self._dependencies.items():
            mk.create_rule([target]).add_dependencies('$(TOPOBJDIR)/%s' % d
                                                      for d in deps)

        # Add backend dependencies:
        mk.create_rule([self._backend_output_list_file
                        ]).add_dependencies(self.backend_input_files)

        mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')

        # One install manifest file per base, named after the base with '/'
        # flattened to '_' so it is a valid single path component.
        for base, install_manifest in self._install_manifests.items():
            with self._write_file(
                    mozpath.join(self.environment.topobjdir, 'faster',
                                 'install_%s' % base.replace('/', '_'))) as fh:
                install_manifest.write(fileobj=fh)

        with self._write_file(
                mozpath.join(self.environment.topobjdir, 'faster',
                             'Makefile')) as fh:
            mk.dump(fh, removal_guard=False)