Example #1
    def consume_finished(self):
        CommonBackend.consume_finished(self)

        for srcdir in sorted(self._backend_files.keys()):
            with self._write_file(fh=self._backend_files[srcdir]) as bf:
                makefile_in = os.path.join(srcdir, 'Makefile.in')
                makefile = os.path.join(bf.objdir, 'Makefile')

                # If Makefile.in exists, use it as a template. Otherwise,
                # create a stub.
                stub = not os.path.exists(makefile_in)
                if not stub:
                    self.log(logging.DEBUG, 'substitute_makefile',
                             {'path': makefile},
                             'Substituting makefile: {path}')

                    # Adding the Makefile.in here has the desired side-effect
                    # that if the Makefile.in disappears, this will force
                    # moz.build traversal. This means that when we remove empty
                    # Makefile.in files, the old file will get replaced with
                    # the autogenerated one automatically.
                    self.backend_input_files.add(makefile_in)

                    for skiplist in self._may_skip.values():
                        if bf.relobjdir in skiplist:
                            skiplist.remove(bf.relobjdir)
                else:
                    self.log(logging.DEBUG, 'stub_makefile',
                             {'path': makefile},
                             'Creating stub Makefile: {path}')

                # Can't skip directories with a jar.mn for the 'libs' tier.
                if bf.relobjdir in self._may_skip['libs'] and \
                        os.path.exists(os.path.join(srcdir, 'jar.mn')):
                    self._may_skip['libs'].remove(bf.relobjdir)

                with self._write_file(makefile) as fh:
                    bf.environment.create_makefile(fh, stub=stub)

        self._fill_root_mk()

        # Write out a master list of all IPDL source files.
        ipdl_dir = os.path.join(self.environment.topobjdir, 'ipc', 'ipdl')
        mk = mozmakeutil.Makefile()

        sorted_ipdl_sources = list(sorted(self._ipdl_sources))
        mk.add_statement('ALL_IPDLSRCS := %s' % ' '.join(sorted_ipdl_sources))

        def files_from(ipdl):
            base = os.path.basename(ipdl)
            root, ext = os.path.splitext(base)

            # Both .ipdl and .ipdlh become .cpp files
            files = ['%s.cpp' % root]
            if ext == '.ipdl':
                # .ipdl also becomes Child/Parent.cpp files
                files.extend(['%sChild.cpp' % root, '%sParent.cpp' % root])
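                # e.g. a hypothetical PExample.ipdl expands to PExample.cpp,
                # PExampleChild.cpp and PExampleParent.cpp.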
            return files

        ipdl_cppsrcs = list(
            itertools.chain(*[files_from(p) for p in sorted_ipdl_sources]))
        self._add_unified_build_rules(
            mk,
            ipdl_cppsrcs,
            ipdl_dir,
            unified_prefix='UnifiedProtocols',
            unified_files_makefile_variable='CPPSRCS')

        mk.add_statement('IPDLDIRS := %s' % ' '.join(
            sorted(set(os.path.dirname(p) for p in self._ipdl_sources))))

        with self._write_file(os.path.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
            mk.dump(ipdls, removal_guard=False)

        # Write out master lists of WebIDL source files.
        bindings_dir = os.path.join(self.environment.topobjdir, 'dom',
                                    'bindings')

        mk = mozmakeutil.Makefile()

        def write_var(variable, sources):
            files = [os.path.basename(f) for f in sorted(sources)]
            mk.add_statement('%s += %s' % (variable, ' '.join(files)))

        write_var('webidl_files', self._webidl_sources)
        write_var('generated_events_webidl_files',
                  self._generated_events_webidl_sources)
        write_var('test_webidl_files', self._test_webidl_sources)
        write_var('preprocessed_test_webidl_files',
                  self._preprocessed_test_webidl_sources)
        write_var('generated_webidl_files', self._generated_webidl_sources)
        write_var('preprocessed_webidl_files',
                  self._preprocessed_webidl_sources)

        all_webidl_files = itertools.chain(
            iter(self._webidl_sources),
            iter(self._generated_events_webidl_sources),
            iter(self._generated_webidl_sources),
            iter(self._preprocessed_webidl_sources))
        all_webidl_files = [os.path.basename(x) for x in all_webidl_files]
        all_webidl_sources = [
            re.sub(r'\.webidl$', 'Binding.cpp', x) for x in all_webidl_files
        ]
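        # e.g. a hypothetical Example.webidl maps to ExampleBinding.cpp.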

        self._add_unified_build_rules(
            mk,
            all_webidl_sources,
            bindings_dir,
            unified_prefix='UnifiedBindings',
            unified_files_makefile_variable='unified_binding_cpp_files')

        # Assume that Somebody Else has responsibility for correctly
        # specifying removal dependencies for |all_webidl_sources|.
        with self._write_file(os.path.join(bindings_dir,
                                           'webidlsrcs.mk')) as webidls:
            mk.dump(webidls, removal_guard=False)

        # Write out a dependency file used to determine whether a config.status
        # re-run is needed.
        inputs = sorted(
            p.replace(os.sep, '/') for p in self.backend_input_files)

        # We need to use $(DEPTH) so the target here matches what's in
        # rules.mk. If they are different, the dependencies don't get pulled in
        # properly.
        with self._write_file('%s.pp' %
                              self._backend_output_list_file) as backend_deps:
            backend_deps.write('$(DEPTH)/backend.%s: %s\n' %
                               (self.__class__.__name__, ' '.join(inputs)))
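            # Also emit an empty rule for each input so make does not fail
            # with "No rule to make target" if one of them is removed.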
            for path in inputs:
                backend_deps.write('%s:\n' % path)

        # Make the master test manifest files.
        for flavor, t in self._test_manifests.items():
            install_prefix, manifests = t
            manifest_stem = os.path.join(install_prefix, '%s.ini' % flavor)
            self._write_master_test_manifest(
                os.path.join(self.environment.topobjdir, '_tests',
                             manifest_stem), manifests)

            # Catch duplicate inserts.
            try:
                self._install_manifests['tests'].add_optional_exists(
                    manifest_stem)
            except ValueError:
                pass

        self._write_manifests('install', self._install_manifests)

        ensureParentDir(os.path.join(self.environment.topobjdir, 'dist',
                                     'foo'))
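
The example above hands the generated IPDL and WebIDL sources to
_add_unified_build_rules, which batches many small generated .cpp files into a
few "unified" translation units to keep compile times down. A minimal sketch of
that batching idea, using a hypothetical helper name and chunk size (the real
_add_unified_build_rules implementation is not shown here):

def chunk_unified_sources(cppsrcs, unified_prefix, files_per_unit=16):
    # Group generated .cpp files into UnifiedProtocols0.cpp,
    # UnifiedProtocols1.cpp, ..., each of which would #include its chunk.
    units = []
    for i, start in enumerate(range(0, len(cppsrcs), files_per_unit)):
        name = '%s%d.cpp' % (unified_prefix, i)
        units.append((name, cppsrcs[start:start + files_per_unit]))
    return units

# e.g. chunk_unified_sources(ipdl_cppsrcs, 'UnifiedProtocols') pairs each
# UnifiedProtocolsN.cpp with the chunk of generated sources it should include.
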
Example #2
    def consume_finished(self):
        CommonBackend.consume_finished(self)

        for objdir, backend_file in sorted(self._backend_files.items()):
            srcdir = backend_file.srcdir
            with self._write_file(fh=backend_file) as bf:
                makefile_in = mozpath.join(srcdir, 'Makefile.in')
                makefile = mozpath.join(objdir, 'Makefile')

                # If Makefile.in exists, use it as a template. Otherwise,
                # create a stub.
                stub = not os.path.exists(makefile_in)
                if not stub:
                    self.log(logging.DEBUG, 'substitute_makefile',
                        {'path': makefile}, 'Substituting makefile: {path}')
                    self.summary.makefile_in_count += 1

                    for tier, skiplist in self._may_skip.items():
                        if tier in ('compile', 'binaries'):
                            continue
                        if bf.relobjdir in skiplist:
                            skiplist.remove(bf.relobjdir)
                else:
                    self.log(logging.DEBUG, 'stub_makefile',
                        {'path': makefile}, 'Creating stub Makefile: {path}')

                # Can't skip directories with a jar.mn for the 'libs' tier.
                if bf.relobjdir in self._may_skip['libs'] and \
                        os.path.exists(mozpath.join(srcdir, 'jar.mn')):
                    self._may_skip['libs'].remove(bf.relobjdir)

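                # Describe the Makefile.in -> Makefile substitution and hand
                # it to _create_makefile, which writes the objdir Makefile
                # (or a stub when no Makefile.in exists).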
                obj = self.Substitution()
                obj.output_path = makefile
                obj.input_path = makefile_in
                obj.topsrcdir = bf.environment.topsrcdir
                obj.topobjdir = bf.environment.topobjdir
                self._create_makefile(obj, stub=stub)

        # Write out a master list of all IPDL source files.
        ipdl_dir = mozpath.join(self.environment.topobjdir, 'ipc', 'ipdl')
        mk = mozmakeutil.Makefile()

        sorted_ipdl_sources = list(sorted(self._ipdl_sources))
        mk.add_statement('ALL_IPDLSRCS := %s' % ' '.join(sorted_ipdl_sources))

        def files_from(ipdl):
            base = mozpath.basename(ipdl)
            root, ext = mozpath.splitext(base)

            # Both .ipdl and .ipdlh become .cpp files
            files = ['%s.cpp' % root]
            if ext == '.ipdl':
                # .ipdl also becomes Child/Parent.cpp files
                files.extend(['%sChild.cpp' % root,
                              '%sParent.cpp' % root])
            return files

        ipdl_cppsrcs = list(itertools.chain(
            *[files_from(p) for p in sorted_ipdl_sources]))
        self._add_unified_build_rules(mk, ipdl_cppsrcs, ipdl_dir,
                                      unified_prefix='UnifiedProtocols',
                                      unified_files_makefile_variable='CPPSRCS')

        mk.add_statement('IPDLDIRS := %s' % ' '.join(
            sorted(set(mozpath.dirname(p) for p in self._ipdl_sources))))

        with self._write_file(mozpath.join(ipdl_dir, 'ipdlsrcs.mk')) as ipdls:
            mk.dump(ipdls, removal_guard=False)

        # These contain autogenerated sources that the build config doesn't
        # yet know about.
        # TODO Emit GENERATED_SOURCES so these special cases are dealt with
        # the proper way.
        self._may_skip['compile'] -= {'ipc/ipdl'}
        self._may_skip['compile'] -= {'dom/bindings', 'dom/bindings/test'}

        self._fill_root_mk()

        # Write out a dependency file used to determine whether a config.status
        # re-run is needed.
        inputs = sorted(p.replace(os.sep, '/') for p in self.backend_input_files)

        # We need to use $(DEPTH) so the target here matches what's in
        # rules.mk. If they are different, the dependencies don't get pulled in
        # properly.
        with self._write_file('%s.pp' %
                              self._backend_output_list_file) as backend_deps:
            backend_deps.write('$(DEPTH)/backend.%s: %s\n' %
                               (self.__class__.__name__, ' '.join(inputs)))
            for path in inputs:
                backend_deps.write('%s:\n' % path)

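        # "Touch" the backend output list file: opening it in append mode
        # creates it if it is missing, and os.utime() bumps its timestamps.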
        with open(self._backend_output_list_file, 'a'):
            pass
        os.utime(self._backend_output_list_file, None)

        # Make the master test manifest files.
        for flavor, t in self._test_manifests.items():
            install_prefix, manifests = t
            manifest_stem = mozpath.join(install_prefix, '%s.ini' % flavor)
            self._write_master_test_manifest(mozpath.join(
                self.environment.topobjdir, '_tests', manifest_stem),
                manifests)

            # Catch duplicate inserts.
            try:
                self._install_manifests['tests'].add_optional_exists(manifest_stem)
            except ValueError:
                pass

        self._write_manifests('install', self._install_manifests)

        ensureParentDir(mozpath.join(self.environment.topobjdir, 'dist', 'foo'))
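
For reference, the mozmakeutil.Makefile object used in both examples is driven
only through add_statement() and dump(). A minimal, self-contained usage with
hypothetical content (and assuming mozmakeutil can be imported under that name;
the examples above only show it already bound) might look like:

import mozmakeutil  # assumed import; not shown in the examples

mk = mozmakeutil.Makefile()
mk.add_statement('ALL_IPDLSRCS := PExample.ipdl PExample.ipdlh')  # hypothetical
with open('ipdlsrcs.mk', 'w') as fh:
    mk.dump(fh, removal_guard=False)  # mirrors the dump() calls above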