예제 #1
0
    def test_diff_not_default(self):
        """A FileAvoidWrite created without capture_diff records no diff."""

        writer = FileAvoidWrite('doesnotexist')
        writer.write('dummy')
        writer.close()
        self.assertIsNone(writer.diff)
예제 #2
0
def main():
    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument('-I', action='append', dest='incdirs', default=['.'],
                        help="Directory to search for imported files")
    parser.add_argument('config',
                        help='Config file to load')
    parser.add_argument('header_output', metavar='FILE',
                        help="Quick stub header output file")
    parser.add_argument('stub_output', metavar='FILE',
                        help="C++ source output file")
    parser.add_argument('makedepend_output', metavar='FILE',
                        help="gnumake dependencies output file")

    # Parsed options are consumed by helpers elsewhere in this module,
    # so publish them as a module-level global.
    global options
    options = parser.parse_args()

    # The IDL parser instance is likewise shared through a global.
    global p
    p = xpidl.IDLParser()

    conf = readConfigFile(options.config)

    # FileAvoidWrite leaves each output file untouched when its content is
    # unchanged, so make targets depending on them are not needlessly dirtied.
    with FileAvoidWrite(options.header_output) as fh:
        idl_paths = print_header_file(fh, conf)
    with FileAvoidWrite(options.stub_output) as fh:
        idl_paths |= print_cpp_file(fh, conf)
    with FileAvoidWrite(options.makedepend_output) as fh:
        write_dep_makefile(fh, options.stub_output, idl_paths)
예제 #3
0
    def _fill_group(self, values):
        """Write one file per (key, value) pair and neutralize stale files."""
        # Reset the cache first.  This mostly matters for tests that read the
        # environment, write out a new set of variables, and read it again in
        # the same process; normally only configure calls this, and other
        # consumers build their own PartialConfigEnvironments in fresh python
        # processes.
        self._dict = {}

        previously_written = self._load_config_track()

        current = {self._write_file(key, value)
                   for key, value in six.iteritems(values)}

        for stale in previously_written - current:
            # os.remove() would hide the change from make, which needs to see
            # that the target is out of date.  Overwriting with a json None is
            # treated the same as a non-existing file.
            with FileAvoidWrite(stale) as fh:
                json.dump(None, fh)

        with FileAvoidWrite(self._config_track) as fh:
            for name in sorted(current):
                fh.write("%s\n" % name)
예제 #4
0
    def test_diff_not_default(self):
        """Without capture_diff, closing the writer leaves diff unset."""

        with MockedOpen({'file': 'old'}):
            writer = FileAvoidWrite('file')
            writer.write('dummy')
            writer.close()
            self.assertIsNone(writer.diff)
예제 #5
0
def test_store_new_contents(tmp_path):
    """Writing a missing file reports (existed=False, updated=True)."""
    target = tmp_path / "file.txt"

    faw = FileAvoidWrite(str(target))
    faw.write("content")

    existed, updated = faw.close()
    assert (existed, updated) == (False, True)
    assert target.read_text() == "content"
예제 #6
0
    def test_diff_not_default(self):
        """diff stays None unless diff capture was explicitly requested."""

        with MockedOpen({"file": "old"}):
            writer = FileAvoidWrite("file")
            writer.write("dummy")
            writer.close()
            self.assertIsNone(writer.diff)
예제 #7
0
    def copy(self, dest, skip_if_older=True):
        '''
        Invokes the preprocessor to create the destination file.

        :param dest: destination path (string) or Dest instance.
        :param skip_if_older: when True, skip preprocessing if the
            destination exists and is newer than every known dependency.
        :return: True if the destination was (re)generated, False if the
            run was skipped.
        '''
        if isinstance(dest, six.string_types):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        # See comment in AbsoluteSymlinkFile about Windows.
        if hasattr(os, 'symlink') and platform.system() != 'Windows':
            if os.path.islink(dest.path):
                os.remove(dest.path)

        # Dependencies known statically, before consulting the depfile.
        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with _open(self.depfile, 'rt') as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        # FileAvoidWrite keeps the depfile's mtime stable when its content
        # does not change, avoiding spurious rebuilds.
        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with _open(self.path, 'rU') as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
예제 #8
0
파일: files.py 프로젝트: luke-chang/gecko-1
    def copy(self, dest, skip_if_older=True):
        '''
        Invokes the preprocessor to create the destination file.

        NOTE(review): this is the Python 2 variant (``basestring``, binary
        depfile read); the logic mirrors the six-based version elsewhere.

        :param dest: destination path (string) or Dest instance.
        :param skip_if_older: when True, skip preprocessing if the
            destination exists and is newer than every known dependency.
        :return: True if the destination was (re)generated, False if the
            run was skipped.
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        if hasattr(os, 'symlink'):
            if os.path.islink(dest.path):
                os.remove(dest.path)

        # Dependencies known statically, before consulting the depfile.
        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with open(self.depfile, 'rb') as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        # FileAvoidWrite keeps the depfile's mtime stable when its content
        # does not change, avoiding spurious rebuilds.
        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with open(self.path, 'rU') as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
예제 #9
0
def test_change_binary_file_contents(tmp_path):
    """Binary-mode FileAvoidWrite rewrites a file whose bytes differ."""
    target = tmp_path / "file.dat"
    target.write_bytes(b"\0")

    faw = FileAvoidWrite(str(target), readmode="rb")
    faw.write(b"\0\0\0")

    existed, updated = faw.close()
    assert (existed, updated) == (True, True)
    assert target.read_bytes() == b"\0\0\0"
예제 #10
0
def test_overwrite_contents(tmp_path):
    """Writing different text to an existing file updates it on disk."""
    target = tmp_path / "file.txt"
    target.write_text("abc")

    faw = FileAvoidWrite(str(target))
    faw.write("bazqux")

    existed, updated = faw.close()
    assert (existed, updated) == (True, True)
    assert target.read_text() == "bazqux"
예제 #11
0
def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
            xpcrs_dir, xpt_dir, deps_dir, module, idl_files):
    """Parse each XPIDL input and emit a C++ header, Rust bindings (rt/bt),
    one linked .xpt typelib for the module, and an optional make depfile.

    ``input_dirs`` is accepted for interface compatibility; resolution here
    uses ``inc_paths``.
    """
    p = IDLParser(outputdir=cache_dir)

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    # bindings_conf is executed as Python; it must define DOMInterfaces.
    glbl = {}
    execfile(bindings_conf, glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(six.ensure_text(s) for s in
                          iter_modules_in_path(topsrcdir))

    for path in idl_files:
        basename = os.path.basename(path)
        stem, _ = os.path.splitext(basename)
        idl_data = open(path).read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve(inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(six.ensure_text(s) for s in idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
    # number of different changes in the code, which may not cause the .xpt
    # files to be changed in any way. This means that make will re-run us every
    # time a build is run whether or not anything changed. To fix this we
    # unconditionally write out the file.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with open(xpt_path, 'w') as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([six.ensure_text(xpt_path)])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
예제 #12
0
    def install_from_file(self, filename, distdir):
        """Post-process the artifact archive ``filename`` (caching the result)
        and extract its contents into ``distdir``, fixing up permissions.

        Returns 0 on success.
        """
        self.log(logging.INFO, "artifact", {"filename": filename}, "Installing from (unknown)")

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(
                logging.DEBUG,
                "artifact",
                {"path": processed_filename},
                "Skipping cache: removing cached processed artifact {path}",
            )
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(logging.INFO, "artifact", {"filename": filename}, "Processing contents of (unknown)")
            self.log(
                logging.INFO,
                "artifact",
                {"processed_filename": processed_filename},
                "Writing processed {processed_filename}",
            )
            self._artifact_job.process_artifact(filename, processed_filename)

        self.log(
            logging.INFO,
            "artifact",
            {"processed_filename": processed_filename},
            "Installing from processed {processed_filename}",
        )

        # Copy all .so files, avoiding modification where possible.
        ensureParentDir(mozpath.join(distdir, ".dummy"))

        with zipfile.ZipFile(processed_filename) as zf:
            for info in zf.infolist():
                if info.filename.endswith(".ini"):
                    continue
                n = mozpath.join(distdir, info.filename)
                # FileAvoidWrite keeps mtimes stable for unchanged files.
                fh = FileAvoidWrite(n, mode="rb")
                shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(
                    logging.INFO,
                    "artifact",
                    {"updating": "Updating" if file_updated else "Not updating", "filename": n},
                    "{updating} (unknown)",
                )
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
                    perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                    os.chmod(n, perms)
        return 0
예제 #13
0
def test_no_write_happens_if_file_contents_same(tmp_path):
    """An identical write must leave the file's mtime untouched."""
    target = tmp_path / "file.txt"
    target.write_text("content")
    mtime_before = target.stat().st_mtime

    faw = FileAvoidWrite(str(target))
    faw.write("content")

    existed, updated = faw.close()
    assert (existed, updated) == (True, False)
    assert target.stat().st_mtime == mtime_before
예제 #14
0
    def test_file_avoid_write(self):
        """FileAvoidWrite stores new/changed content but never rewrites a
        file whose content is already identical."""
        with MockedOpen({'file': 'content'}):
            # Overwriting an existing file replaces its content
            with FileAvoidWrite('file') as file:
                file.write('bazqux')
            self.assertEqual(open('file', 'r').read(), 'bazqux')

            # Creating a new file (obviously) stores its content
            with FileAvoidWrite('file2') as file:
                file.write('content')
            self.assertEqual(open('file2').read(), 'content')

        class MyMockedOpen(MockedOpen):
            '''MockedOpen extension to raise an exception if something
            attempts to write in an opened file.
            '''
            def __call__(self, name, mode):
                if 'w' in mode:
                    # `raise Exception, msg` is Python 2-only syntax and a
                    # SyntaxError on Python 3; the call form works on both.
                    raise Exception('Unexpected open with write mode')
                return MockedOpen.__call__(self, name, mode)

        with MyMockedOpen({'file': 'content'}):
            # Validate that MyMockedOpen works as intended
            file = FileAvoidWrite('file')
            file.write('foobar')
            self.assertRaises(Exception, file.close)

            # Check that no write actually happens when writing the
            # same content as what already is in the file
            with FileAvoidWrite('file') as file:
                file.write('content')
예제 #15
0
def emit_code(fd, pref_list_filename):
    """Preprocess the static pref list YAML and write all generated files.

    ``fd`` is the build-system-provided handle for the first named output
    (StaticPrefListAll.h); every other output is written here directly.
    Exits the process with status 1 on parse/generation errors.
    """
    pp = Preprocessor()
    pp.context.update(buildconfig.defines["ALLDEFINES"])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get("MOZ_DEBUG"):
        pp.context["DEBUG"] = "1"

    if buildconfig.substs.get("CPU_ARCH") == "aarch64":
        pp.context["MOZ_AARCH64"] = True

    pp.out = StringIO()
    pp.do_filter("substitution")
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        # Record the input path relative to the source root for provenance.
        input_file = os.path.relpath(
            pref_list_filename,
            os.environ.get("GECKO_PATH", os.environ.get("TOPSRCDIR")),
        )
        code = generate_code(pref_list, input_file)
    except (IOError, ValueError) as e:
        print("{}: error:\n  {}\n".format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code["static_pref_list_all_h"])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    # NOTE(review): some outputs below are opened with bare filenames
    # (relative to the current working directory) while others are joined
    # onto init_dirname -- presumably intentional; confirm against callers.
    with FileAvoidWrite("StaticPrefsAll.h") as fd:
        fd.write(code["static_prefs_all_h"])

    for group, text in sorted(code["static_pref_list_group_h"].items()):
        filename = "StaticPrefList_{}.h".format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code["static_prefs_group_h"].items()):
        filename = "StaticPrefs_{}.h".format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname,
                                     "StaticPrefsCGetters.cpp")) as fd:
        fd.write(code["static_prefs_c_getters_cpp"])

    with FileAvoidWrite("static_prefs.rs") as fd:
        fd.write(code["static_prefs_rs"])
예제 #16
0
    def test_diff_update(self):
        """capture_diff records a diff when an existing file changes."""

        with MockedOpen({"file": "old"}):
            writer = FileAvoidWrite("file", capture_diff=True)
            writer.write("new")
            writer.close()

            rendered = "\n".join(writer.diff)
            self.assertIn("-old", rendered)
            self.assertIn("+new", rendered)
예제 #17
0
    def test_diff_update(self):
        """Updating a file with capture_diff yields removed/added lines."""

        with MockedOpen({'file': 'old'}):
            writer = FileAvoidWrite('file', capture_diff=True)
            writer.write('new')
            writer.close()

            rendered = '\n'.join(writer.diff)
            self.assertIn('-old', rendered)
            self.assertIn('+new', rendered)
예제 #18
0
    def _maybe_write_file(self, path, content, result):
        """Write `content` to `path` via FileAvoidWrite and record the path
        in the matching bucket of `result`: created/updated/unchanged."""
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            bucket = 0  # newly created
        elif updated:
            bucket = 1  # rewritten with new content
        else:
            bucket = 2  # left untouched
        result[bucket].add(path)
예제 #19
0
파일: __init__.py 프로젝트: urrytr/gecko
    def _maybe_write_file(self, path, content, result):
        """Write the file and sort its path into result[0]/[1]/[2], i.e.
        created / updated / unchanged."""
        writer = FileAvoidWrite(path)
        writer.write(content)
        existed, updated = writer.close()

        if existed:
            target = result[1] if updated else result[2]
        else:
            target = result[0]
        target.add(path)
예제 #20
0
def main(args):
    """Merge region.properties search-engine settings from one or more
    source directories into a single JSON file at OUTPUT.

    Returns 0 on success.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose',
                        '-v',
                        default=False,
                        action='store_true',
                        help='be verbose')
    parser.add_argument('--silent',
                        '-s',
                        default=False,
                        action='store_true',
                        help='be silent')
    parser.add_argument(
        '--srcdir',
        metavar='SRCDIR',
        action='append',
        required=True,
        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT', help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    properties = merge_properties('region.properties', reversed(opts.srcdir))

    default = properties.get('browser.search.defaultenginename')
    engines = properties.get_list('browser.search.order')

    if opts.verbose:
        # Wrap stdout so the engine names print safely as UTF-8.
        writer = codecs.getwriter('utf-8')(sys.stdout)
        print('Read {len} engines: {engines}'.format(len=len(engines),
                                                     engines=engines),
              file=writer)
        print("Default engine is '{default}'.".format(default=default),
              file=writer)

    browsersearch = {}
    browsersearch['default'] = default
    browsersearch['engines'] = engines

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(browsersearch, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
예제 #21
0
    def install_from_file(self, filename, distdir):
        """Post-process the artifact archive ``filename`` (caching and
        registering the result) and extract its contents into ``distdir``,
        fixing up permissions.

        Returns 0 on success.
        """
        self.log(logging.INFO, 'artifact',
            {'filename': filename},
            'Installing from (unknown)')

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                {'path': processed_filename},
                'Skipping cache: removing cached processed artifact {path}')
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                {'filename': filename},
                'Processing contents of (unknown)')
            self.log(logging.INFO, 'artifact',
                {'processed_filename': processed_filename},
                'Writing processed {processed_filename}')
            self._artifact_job.process_artifact(filename, processed_filename)

        # Let the cache's size-limit policy track the processed file.
        self._artifact_cache._persist_limit.register_file(processed_filename)

        self.log(logging.INFO, 'artifact',
            {'processed_filename': processed_filename},
            'Installing from processed {processed_filename}')

        # Copy all .so files, avoiding modification where possible.
        ensureParentDir(mozpath.join(distdir, '.dummy'))

        with zipfile.ZipFile(processed_filename) as zf:
            for info in zf.infolist():
                if info.filename.endswith('.ini'):
                    continue
                n = mozpath.join(distdir, info.filename)
                # FileAvoidWrite keeps mtimes stable for unchanged files.
                fh = FileAvoidWrite(n, mode='rb')
                shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(logging.INFO, 'artifact',
                    {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
                    '{updating} (unknown)')
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
                    perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
                    os.chmod(n, perms)
        return 0
예제 #22
0
    def test_diff_create(self):
        """Creating a brand-new file still records an additions-only diff."""

        workdir = tempfile.mkdtemp()
        try:
            target = os.path.join(workdir, 'file')
            writer = FileAvoidWrite(target, capture_diff=True)
            writer.write('new')
            writer.close()

            self.assertIn('+new', '\n'.join(writer.diff))
        finally:
            shutil.rmtree(workdir)
예제 #23
0
    def install_from_file(self, filename, distdir):
        """Post-process the artifact archive ``filename`` (caching and
        registering the result) and extract its contents into ``distdir``,
        fixing up permissions.

        Returns 0 on success.
        """
        self.log(logging.INFO, 'artifact',
            {'filename': filename},
            'Installing from (unknown)')

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(logging.DEBUG, 'artifact',
                {'path': processed_filename},
                'Skipping cache: removing cached processed artifact {path}')
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                {'filename': filename},
                'Processing contents of (unknown)')
            self.log(logging.INFO, 'artifact',
                {'processed_filename': processed_filename},
                'Writing processed {processed_filename}')
            self._artifact_job.process_artifact(filename, processed_filename)

        # Let the cache's size-limit policy track the processed file.
        self._artifact_cache._persist_limit.register_file(processed_filename)

        self.log(logging.INFO, 'artifact',
            {'processed_filename': processed_filename},
            'Installing from processed {processed_filename}')

        # Copy all .so files, avoiding modification where possible.
        ensureParentDir(mozpath.join(distdir, '.dummy'))

        with zipfile.ZipFile(processed_filename) as zf:
            for info in zf.infolist():
                if info.filename.endswith('.ini'):
                    continue
                n = mozpath.join(distdir, info.filename)
                # FileAvoidWrite keeps mtimes stable for unchanged files.
                fh = FileAvoidWrite(n, mode='rb')
                shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(logging.INFO, 'artifact',
                    {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
                    '{updating} (unknown)')
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
                    perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
                    os.chmod(n, perms)
        return 0
예제 #24
0
def process(input_dir, inc_paths, bindings_conf, cache_dir, header_dir,
            xpcrs_dir, xpt_dir, deps_dir, module, stems):
    """Parse ``<input_dir>/<stem>.idl`` for each stem and emit a C++ header,
    Rust bindings (rt/bt), a linked .xpt typelib for the module, and an
    optional make depfile.
    """
    p = IDLParser(outputdir=cache_dir)

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    # bindings_conf is executed as Python; it must define DOMInterfaces.
    glbl = {}
    execfile(bindings_conf, glbl)
    webidlconfig = glbl['DOMInterfaces']

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(iter_modules_in_path(topsrcdir))

    for stem in stems:
        path = os.path.join(input_dir, '%s.idl' % stem)
        idl_data = open(path).read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve([input_dir] + inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        rule.add_dependencies(idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    with FileAvoidWrite(xpt_path) as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([xpt_path])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
예제 #25
0
def process(input_dir, inc_paths, cache_dir, header_dir, xpcrs_dir,
            xpt_dir, deps_dir, module, stems):
    """Parse ``<input_dir>/<stem>.idl`` for each stem and emit a C++ header,
    Rust bindings (rt/bt), a binary .xpt typelib for the module, and an
    optional make depfile.
    """
    p = IDLParser(outputdir=cache_dir)

    xpts = {}
    mk = Makefile()
    rule = mk.create_rule()

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(iter_modules_in_path(topsrcdir))

    for stem in stems:
        path = os.path.join(input_dir, '%s.idl' % stem)
        idl_data = open(path).read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve([input_dir] + inc_paths, p)

        header_path = os.path.join(header_dir, '%s.h' % stem)
        rs_rt_path = os.path.join(xpcrs_dir, 'rt', '%s.rs' % stem)
        rs_bt_path = os.path.join(xpcrs_dir, 'bt', '%s.rs' % stem)

        # Build the per-file typelib in memory; linked and written below.
        xpt = BytesIO()
        write_typelib(idl, xpt, path)
        xpt.seek(0)
        xpts[stem] = xpt

        rule.add_dependencies(idl.deps)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, path)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, path)

    # TODO use FileAvoidWrite once it supports binary mode.
    xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
    xpt_link(xpts.values()).write(xpt_path)

    rule.add_targets([xpt_path])
    if deps_dir:
        deps_path = os.path.join(deps_dir, '%s.pp' % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
예제 #26
0
def emit_code(fd, pref_list_filename):
    """Preprocess the static pref list YAML and write all generated files.

    ``fd`` is the build-system-provided handle for the first named output
    (StaticPrefListAll.h); every other output is written here directly.
    Exits the process with status 1 on parse/generation errors.
    """
    pp = Preprocessor()
    pp.context.update(buildconfig.defines['ALLDEFINES'])

    # A necessary hack until MOZ_DEBUG_FLAGS are part of buildconfig.defines.
    if buildconfig.substs.get('MOZ_DEBUG'):
        pp.context['DEBUG'] = '1'

    pp.out = BytesIO()
    pp.do_filter('substitution')
    pp.do_include(pref_list_filename)

    try:
        pref_list = yaml.safe_load(pp.out.getvalue())
        code = generate_code(pref_list)
    except (IOError, ValueError) as e:
        print('{}: error:\n  {}\n'.format(pref_list_filename, e))
        sys.exit(1)

    # When generating multiple files from a script, the build system treats the
    # first named output file (StaticPrefListAll.h in this case) specially -- it
    # is created elsewhere, and written to via `fd`.
    fd.write(code['static_pref_list_all_h'])

    # We must create the remaining output files ourselves. This requires
    # creating the output directory directly if it doesn't already exist.
    ensureParentDir(fd.name)
    init_dirname = os.path.dirname(fd.name)

    # NOTE(review): some outputs below use bare filenames (relative to the
    # current working directory) while others are joined onto init_dirname --
    # presumably intentional; confirm against callers.
    with FileAvoidWrite('StaticPrefsAll.h') as fd:
        fd.write(code['static_prefs_all_h'])

    for group, text in sorted(code['static_pref_list_group_h'].items()):
        filename = 'StaticPrefList_{}.h'.format(group)
        with FileAvoidWrite(os.path.join(init_dirname, filename)) as fd:
            fd.write(text)

    for group, text in sorted(code['static_prefs_group_h'].items()):
        filename = 'StaticPrefs_{}.h'.format(group)
        with FileAvoidWrite(filename) as fd:
            fd.write(text)

    with FileAvoidWrite(os.path.join(init_dirname,
                                     'StaticPrefsCGetters.cpp')) as fd:
        fd.write(code['static_prefs_c_getters_cpp'])

    with FileAvoidWrite('static_prefs.rs') as fd:
        fd.write(code['static_prefs_rs'])
예제 #27
0
def WriteMakefile(filename,
                  data,
                  build_file,
                  depth,
                  topsrcdir,
                  srcdir,
                  relative_path,
                  common_mk_path,
                  extra_data=None):
    """Write a gyp-derived Makefile to `filename`.

    `data` maps make-variable names to a string or a list of strings;
    `extra_data`, if given, is appended verbatim. `relative_path` is
    accepted for interface compatibility but unused here.
    """
    # Anchor relative source-tree paths at `depth`.
    if not os.path.isabs(topsrcdir):
        topsrcdir = depth + "/" + topsrcdir
    if not os.path.isabs(srcdir):
        srcdir = depth + "/" + srcdir
    ensure_directory_exists(filename)
    # FileAvoidWrite already compares against the existing file and skips
    # the write when the contents are unchanged, so no manual comparison
    # is needed here.
    with FileAvoidWrite(filename) as f:
        f.write(
            COMMON_HEADER % {
                'buildfile': build_file,
                'depth': depth,
                'topsrcdir': topsrcdir,
                'srcdir': srcdir
            })
        # dict.items() works on both Python 2 and 3; iteritems() raised
        # AttributeError on Python 3.
        for k, v in data.items():
            f.write(
                "%s = %s\n" %
                (k, " \\\n  ".join([''] + v) if isinstance(v, list) else v))
        f.write(COMMON_FOOTER % {'common_mk_path': common_mk_path})
        if extra_data:
            f.write(extra_data)
예제 #28
0
def main(args):
    """Build a linker version script covering the symbols exported by the
    given .def files (NSS files get NSS-specific preprocessing)."""
    pp = Preprocessor()
    parser = pp.getCommandLineParser()
    parser.add_option('--nss-file', action='append',
                      type='string', dest='nss_files', default=[],
                      help='Specify a .def file that should have NSS\'s processing rules applied to it')
    options, deffiles = parser.parse_args(args)

    symbols = set()
    for nss_file in options.nss_files:
        symbols.update(extract_symbols(nss_preprocess_file(nss_file)))
    for deffile in deffiles:
        # Start each deffile off with a clean slate.
        symbols.update(extract_symbols(preprocess_file(pp.clone(), deffile)))

    script = """{
global:
  %s
local:
  *;
};
"""
    exported = '\n  '.join("%s;" % sym for sym in sorted(symbols))
    with FileAvoidWrite(options.output) as out:
        out.write(script % exported)
예제 #29
0
def test_obj_as_context_manager(tmp_path):
    """FileAvoidWrite used via ``with`` flushes its content on exit."""
    target = tmp_path / "file.txt"

    with FileAvoidWrite(str(target)) as handle:
        handle.write("foobar")

    # Exiting the context manager must have committed the content to disk.
    assert target.read_text() == "foobar"
예제 #30
0
def gen_wrappers(unused, outdir, compiler, template_file, *header_list):
    """Generate one wrapper header per entry in *header_list*.

    Each wrapper is produced by substituting HEADER and HEADER_PATH into
    the template read from *template_file* and written into *outdir*.
    The first argument is unused (kept for caller compatibility).
    """
    # Use a context manager so the template file handle is closed
    # promptly instead of being leaked.
    with open(template_file, 'r') as template_fh:
        template = template_fh.read()

    for header in header_list:
        path = header_path(header, compiler)
        with FileAvoidWrite(os.path.join(outdir, header)) as f:
            f.write(string.Template(template).substitute(HEADER=header,
                                                         HEADER_PATH=path))
예제 #31
0
def WriteCommonMk(path, build_files, scriptname, commandline):
    """Write the shared common.mk fragment listing the input gyp files and
    the command line that regenerates the build."""
    substitutions = {
        'input_gypfiles': ' '.join(build_files),
        'generator': scriptname,
        'commandline': ' '.join(commandline)
    }
    with FileAvoidWrite(path) as makefile:
        makefile.write(COMMON_MK % substitutions)
예제 #32
0
    def environment(self, format, output=None, verbose=False):
        """Dump the build environment in the requested *format*.

        Dispatches to the matching ``_environment_<format>`` method,
        writing to *output* if given, else to stdout.
        """
        handler = getattr(self, '_environment_' + format.replace('.', '_'))

        if not output:
            return handler(sys.stdout, verbose)

        # We want to preserve mtimes if the output file already exists
        # and the content hasn't changed.
        from mozbuild.util import FileAvoidWrite
        with FileAvoidWrite(output) as out:
            return handler(out, verbose)
예제 #33
0
File: artifacts.py  Project: Bouh/gecko-dev
    def install_from_file(self, filename, distdir):
        """Extract the .so libraries from the artifact zip *filename* into
        ``<distdir>/bin``, skipping writes when a file is unchanged.

        Returns 0 unconditionally.
        """
        self.log(logging.INFO, 'artifact',
            {'filename': filename},
            'Installing from (unknown)')

        # Copy all .so files to dist/bin, avoiding modification where possible.
        # Joining with '.dummy' ensures the 'bin' directory itself exists.
        ensureParentDir(os.path.join(distdir, 'bin', '.dummy'))

        with zipfile.ZipFile(filename) as zf:
            for info in zf.infolist():
                if not info.filename.endswith('.so'):
                    continue
                # Flatten any zip-internal directory structure into bin/.
                n = os.path.join(distdir, 'bin', os.path.basename(info.filename))
                # mode='r' presumably controls how FileAvoidWrite reads the
                # existing file for comparison — TODO confirm against the
                # FileAvoidWrite API.
                fh = FileAvoidWrite(n, mode='r')
                shutil.copyfileobj(zf.open(info), fh)
                # close() reports (existed, updated); only 'updated' drives
                # the log message below.
                file_existed, file_updated = fh.close()
                self.log(logging.INFO, 'artifact',
                    {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
                    '{updating} (unknown)')
        return 0
예제 #34
0
def main(args):
    """Generate a browsersearch JSON file describing the default search
    engine and engine ordering read from region.properties files."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--srcdir', metavar='SRCDIR',
                        action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT',
                        help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    props = merge_properties('region.properties', reversed(opts.srcdir))

    default_engine = props.get('browser.search.defaultenginename')
    engine_order = props.get_list('browser.search.order')

    if opts.verbose:
        log_writer = codecs.getwriter('utf-8')(sys.stdout)
        print('Read {len} engines: {engines}'.format(len=len(engine_order), engines=engine_order), file=log_writer)
        print("Default engine is '{default}'.".format(default=default_engine), file=log_writer)

    browsersearch = {
        'default': default_engine,
        'engines': engine_order,
    }

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    out_fh = FileAvoidWrite(output)
    json.dump(browsersearch, out_fh)
    _, updated = out_fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
예제 #35
0
def main(argv):
    """Generate a file by invoking a named method of a Python script,
    passing it a FileAvoidWrite handle for the output."""
    parser = argparse.ArgumentParser('Generate a file from a Python script',
                                     add_help=False)
    parser.add_argument('python_script', metavar='python-script', type=str,
                        help='The Python script to run')
    parser.add_argument('method_name', metavar='method-name', type=str,
                        help='The method of the script to invoke')
    parser.add_argument('output_file', metavar='output-file', type=str,
                        help='The file to generate')
    parser.add_argument('additional_arguments', metavar='arg', nargs='*',
                        help="Additional arguments to the script's main() method")

    args = parser.parse_args(argv)

    script_path = args.python_script
    # Permit the script to import modules from its own directory, just as
    # a direct "python script.py ..." invocation would allow.  Since we
    # load it indirectly, we provide that convenience ourselves.
    sys.path.append(os.path.dirname(script_path))
    with open(script_path, 'r') as fh:
        module = imp.load_module('script', fh, script_path,
                                 ('.py', 'r', imp.PY_SOURCE))

    method = args.method_name
    if not hasattr(module, method):
        print('Error: script "{0}" is missing a {1} method'.format(
            script_path, method),
              file=sys.stderr)
        return 1

    try:
        with FileAvoidWrite(args.output_file) as output:
            # The method's return value becomes our exit status.
            result = module.__dict__[method](output, *args.additional_arguments)
    except IOError as e:
        print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
        traceback.print_exc()
        return 1
    return result
예제 #36
0
    def test_diff_not_default(self):
        """Diffs are not produced by default."""

        writer = FileAvoidWrite('doesnotexist')
        writer.write('dummy')
        writer.close()
        # Without opting in, no diff should be recorded.
        self.assertIsNone(writer.diff)
예제 #37
0
def test_write_unicode(tmp_path):
    """Writing raw UTF-8 bytes through FileAvoidWrite must not raise."""
    # UTF-8 encoding of U+1F600 (grinning face).
    grinning_face = b"\xf0\x9f\x98\x80"

    target = tmp_path / "file.dat"
    writer = FileAvoidWrite(str(target))
    writer.write(grinning_face)
    writer.close()
예제 #38
0
    def test_diff_not_default(self):
        """Diffs are not produced by default."""

        with MockedOpen({'file': 'old'}):
            writer = FileAvoidWrite('file')
            writer.write('dummy')
            writer.close()
            # Even when overwriting existing content, no diff is recorded
            # unless explicitly requested.
            self.assertIsNone(writer.diff)
예제 #39
0
    def install_from_file(self, filename, distdir):
        """Extract the .so libraries from the artifact zip *filename* into
        ``<distdir>/bin``, skipping writes when a file is unchanged.

        Returns 0 unconditionally.
        """
        self.log(logging.INFO, 'artifact', {'filename': filename},
                 'Installing from (unknown)')

        # Copy all .so files to dist/bin, avoiding modification where possible.
        # Joining with '.dummy' ensures the 'bin' directory itself exists.
        ensureParentDir(os.path.join(distdir, 'bin', '.dummy'))

        with zipfile.ZipFile(filename) as zf:
            for info in zf.infolist():
                if not info.filename.endswith('.so'):
                    continue
                # Flatten any zip-internal directory structure into bin/.
                n = os.path.join(distdir, 'bin',
                                 os.path.basename(info.filename))
                # mode='r' presumably controls how FileAvoidWrite reads the
                # existing file for comparison — TODO confirm against the
                # FileAvoidWrite API.
                fh = FileAvoidWrite(n, mode='r')
                shutil.copyfileobj(zf.open(info), fh)
                # close() reports (existed, updated); only 'updated' drives
                # the log message below.
                file_existed, file_updated = fh.close()
                self.log(
                    logging.INFO, 'artifact', {
                        'updating':
                        'Updating' if file_updated else 'Not updating',
                        'filename': n
                    }, '{updating} (unknown)')
        return 0
예제 #40
0
def gifft_map(output_fd, *args):
    """Emit the GIFFT mapping for the probe type named by the last
    positional argument; for Event probes also generate the companion
    EventExtraGIFFTMaps.cpp beside the main output file."""
    probe_type = args[-1]
    input_files = args[DEPS_LEN:-1]
    all_objs, options = parse(input_files)

    # Events also need to output maps from event extra enum to strings.
    # Sadly we need to generate code for all possible events, not just mirrored.
    # Otherwise we won't compile.
    if probe_type != "Event":
        output_gifft_map(output_fd, probe_type, all_objs, None)
        return

    output_path = Path(os.path.dirname(output_fd.name))
    with FileAvoidWrite(output_path / "EventExtraGIFFTMaps.cpp") as cpp_fd:
        output_gifft_map(output_fd, probe_type, all_objs, cpp_fd)
    def _test_one(self, name):
        """Run generate_browsersearch against the named fixture directory
        and return its parsed JSON output."""
        with TemporaryDirectory() as tmpdir:
            with NamedTemporaryFile(mode='r+') as temp:
                fixture_dir = os.path.join(test_data_path, name)

                with FileAvoidWrite(temp.name) as out:
                    generate_browsersearch.main(
                        out,
                        '--silent',
                        '--fallback',
                        mozpath.join(fixture_dir, 'region.properties'),
                    )

                # Read back what the generator wrote to the temp file.
                return json.load(temp)
예제 #42
0
def main(outdir, compiler, template_file, header_list_file):
    """Generate one wrapper header in *outdir* for each (non-comment,
    non-empty) header named in *header_list_file*, substituting HEADER and
    HEADER_PATH into the template from *template_file*."""
    if not os.path.isdir(outdir):
        os.mkdir(outdir)

    # Use context managers so both input file handles are closed promptly
    # (the original leaked the template handle and the list handle).
    with open(template_file, 'r') as template_fh:
        template = template_fh.read()

    with open(header_list_file, 'r') as list_fh:
        for header in list_fh:
            header = header.rstrip()
            if not header or is_comment(header):
                continue

            path = header_path(header, compiler)
            with FileAvoidWrite(os.path.join(outdir, header)) as f:
                f.write(string.Template(template).substitute(HEADER=header,
                                                             HEADER_PATH=path))
예제 #43
0
def main(args):
    """Generate a browsersearch JSON file including per-region overrides
    of the default engine and engine order, read from region.properties."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", "-v", default=False, action="store_true", help="be verbose")
    parser.add_argument("--silent", "-s", default=False, action="store_true", help="be silent")
    parser.add_argument(
        "--srcdir",
        metavar="SRCDIR",
        action="append",
        required=True,
        help="directories to read inputs from, in order of priority",
    )
    parser.add_argument("output", metavar="OUTPUT", help="output")
    options = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    props = merge_properties("region.properties", reversed(options.srcdir))

    # Default, not region-specific.
    default_engine = props.get("browser.search.defaultenginename")
    engine_order = props.get_list("browser.search.order")

    log_writer = codecs.getwriter("utf-8")(sys.stdout)
    if options.verbose:
        print("Read {len} engines: {engines}".format(len=len(engine_order), engines=engine_order), file=log_writer)
        print("Default engine is '{default}'.".format(default=default_engine), file=log_writer)

    browsersearch = {
        "default": default_engine,
        "engines": engine_order,
    }

    # This gets defaults, yes; but it also gets the list of regions known.
    regions = props.get_dict("browser.search.defaultenginename")

    region_map = {}
    for region, region_default in regions.items():
        region_engines = props.get_list("browser.search.order.{region}".format(region=region))

        if options.verbose:
            print(
                "Region '{region}': Read {len} engines: {region_engines}".format(
                    len=len(region_engines), region=region, region_engines=region_engines
                ),
                file=log_writer,
            )
            print(
                "Region '{region}': Default engine is '{region_default}'.".format(
                    region=region, region_default=region_default
                ),
                file=log_writer,
            )

        region_map[region] = {"default": region_default, "engines": region_engines}
    browsersearch["regions"] = region_map

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(options.output)
    out_fh = FileAvoidWrite(output)
    json.dump(browsersearch, out_fh)
    _, updated = out_fh.close()

    if not options.silent:
        if updated:
            print("{output} updated".format(output=output))
        else:
            print("{output} already up-to-date".format(output=output))

    return 0
예제 #44
0
    def test_file_avoid_write(self):
        """Exercise FileAvoidWrite: overwrite, create, context-manager use,
        and the no-write path when content is unchanged."""
        with MockedOpen({"file": "content"}):
            # Overwriting an existing file replaces its content
            faw = FileAvoidWrite("file")
            faw.write("bazqux")
            self.assertEqual(faw.close(), (True, True))
            self.assertEqual(open("file", "r").read(), "bazqux")

            # Creating a new file (obviously) stores its content
            faw = FileAvoidWrite("file2")
            faw.write("content")
            self.assertEqual(faw.close(), (False, True))
            self.assertEqual(open("file2").read(), "content")

        with MockedOpen({"file": "content"}):
            with FileAvoidWrite("file") as file:
                file.write("foobar")

            self.assertEqual(open("file", "r").read(), "foobar")

        class MyMockedOpen(MockedOpen):
            """MockedOpen extension to raise an exception if something
            attempts to write in an opened file.
            """

            def __call__(self, name, mode):
                if "w" in mode:
                    # Python 3 raise syntax; the old `raise Exception, "..."`
                    # form is a SyntaxError on Python 3.
                    raise Exception("Unexpected open with write mode")
                return MockedOpen.__call__(self, name, mode)

        with MyMockedOpen({"file": "content"}):
            # Validate that MyMockedOpen works as intended
            file = FileAvoidWrite("file")
            file.write("foobar")
            self.assertRaises(Exception, file.close)

            # Check that no write actually happens when writing the
            # same content as what already is in the file
            faw = FileAvoidWrite("file")
            faw.write("content")
            self.assertEqual(faw.close(), (True, False))
예제 #45
0
def main(args):
    """Generate the Android suggested-sites JSON file.

    Reads ``list.txt`` (one site name per line) and a ``<name>.json`` file
    per site from the source directories, attaches an Android drawable
    resource URL to each site, verifies the drawable exists when
    --resources is given, and writes the combined list as JSON.

    Returns 0 on success; raises Exception if a required drawable is
    missing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--android-package-name', metavar='NAME',
                        required=True,
                        help='Android package name')
    parser.add_argument('--resources', metavar='RESOURCES',
                        default=None,
                        help='optional Android resource directory to find drawables in')
    parser.add_argument('--srcdir', metavar='SRCDIR',
                        action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT',
                        help='output')
    opts = parser.parse_args(args)

    def resolve_filename(filename):
        # First srcdir containing the file wins.
        for srcdir in opts.srcdir:
            path = mozpath.join(srcdir, filename)
            if os.path.exists(path):
                return path
        return None

    # The list.txt file has one site name per line.  Close the handle
    # promptly instead of leaking it.
    with open(resolve_filename('list.txt'), 'rt') as list_fh:
        names = [s.strip() for s in list_fh.readlines()]
    if opts.verbose:
        print('Reading {len} suggested sites: {names}'.format(len=len(names), names=names))

    # Keep these two in sync.
    image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # The resource directory and the finder are loop-invariant: build them
    # once instead of once per site.
    finder = None
    if opts.resources:
        resources = os.path.abspath(opts.resources)
        finder = FileFinder(resources)

    # Load json files corresponding to each site name and define their
    # respective image URL.
    sites = []
    for name in names:
        filename = resolve_filename(name + '.json')
        if opts.verbose:
            print("Reading '{name}' from (unknown)"
                .format(name=name, filename=filename))
        # Close each site's JSON file promptly instead of leaking it.
        with open(filename, 'rt') as site_fh:
            site = json.load(site_fh)
        site['imageurl'] = image_url_template.format(name=name)
        sites.append(site)

        # Now check for existence of an appropriately named drawable.  If none
        # exists, throw.  This stops a locale discovering, at runtime, that the
        # corresponding drawable was not added to en-US.
        if finder is None:
            continue
        matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
        if not matches:
            raise Exception("Could not find drawable in '{resources}' for '{name}'"
                .format(resources=resources, name=name))
        elif opts.verbose:
            print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                  .format(len=len(matches), resources=resources, name=name, matches=matches))

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(sites, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
예제 #46
0
def main(args):
    """Generate the Android suggested-sites JSON file from region.properties.

    Reads the suggested-site list and per-site properties from the source
    directories, attaches an Android drawable resource URL to each site,
    verifies the drawable exists when --resources is given, and writes the
    combined list as JSON.

    Returns 0 on success; raises Exception if a required drawable is
    missing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--android-package-name', metavar='NAME',
                        required=True,
                        help='Android package name')
    parser.add_argument('--resources', metavar='RESOURCES',
                        default=None,
                        help='optional Android resource directory to find drawables in')
    parser.add_argument('--srcdir', metavar='SRCDIR',
                        action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT',
                        help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    properties = merge_properties('region.properties', reversed(opts.srcdir))
    names = properties.get_list('browser.suggestedsites.list')
    if opts.verbose:
        print('Reading {len} suggested sites: {names}'.format(len=len(names), names=names))

    # Keep these two in sync.
    image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # The resource directory and the finder are loop-invariant: build them
    # once instead of once per site.
    finder = None
    if opts.resources:
        resources = os.path.abspath(opts.resources)
        finder = FileFinder(resources)

    # Load properties corresponding to each site name and define their
    # respective image URL.
    sites = []
    for name in names:
        site = properties.get_dict('browser.suggestedsites.{name}'.format(name=name),
                                   required_keys=('title', 'url', 'bgcolor'))
        site['imageurl'] = image_url_template.format(name=name)
        sites.append(site)

        # Now check for existence of an appropriately named drawable.  If none
        # exists, throw.  This stops a locale discovering, at runtime, that the
        # corresponding drawable was not added to en-US.
        if finder is None:
            continue
        matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
        if not matches:
            raise Exception("Could not find drawable in '{resources}' for '{name}'"
                .format(resources=resources, name=name))
        elif opts.verbose:
            print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                  .format(len=len(matches), resources=resources, name=name, matches=matches))

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(sites, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0