Example No. 1
def test_is_dir_empty():
    work_dir = os.getcwd()

    test_dir_path = os.path.join(work_dir, 'dir')
    deleted_file_path = os.path.join(test_dir_path, 'deleted2.txt')
    deleted_file2_path = os.path.join(test_dir_path, 'deleted.txt')
    mkdir(test_dir_path)

    ntfile = NTFile(test_dir_path)
    ntfile2 = NTFile(deleted_file_path)

    try:

        assert ntfile.is_dir_empty
        touch(deleted_file_path)
        touch(deleted_file2_path)
        assert not ntfile.is_dir_empty
        ntfile2.open(Access.DELETE, Share.DELETE)
        ntfile2.dispose()
        assert not ntfile.is_dir_empty
        rm(deleted_file2_path)
        assert ntfile.is_dir_empty

    finally:
        ntfile.close()
        ntfile2.close()
Example No. 2
def test_iterate_on_dir():
    work_dir = os.getcwd()

    test_dir_path = os.path.join(work_dir, 'dir')
    mkdir(test_dir_path)

    result = set()

    def fun(name, ntfile_instance):
        result.add(name)
        return True, False

    try:
        ntfile = NTFile(test_dir_path)
        status = ntfile.iterate_on_dir(fun, default_result=False)
        assert not result
        assert not status
    finally:
        ntfile.close()

    for n in range(0, 40):
        touch(os.path.join(test_dir_path, '%s.txt' % n))
    try:
        ntfile = NTFile(test_dir_path)
        status = ntfile.iterate_on_dir(fun, default_result=False)
        assert status
        assert len(result) == 40
    finally:
        ntfile.close()
Example No. 3
    def create(self, quiet=False):
        """Create a build space.

        The function creates all the necessary directories and files to
        have a valid empty build space.

        :param quiet: do not print info messages
        :type quiet: bool
        """
        for d in self.dirs:
            mkdir(getattr(self, '%s_dir' % d), quiet=quiet)
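The create method above resolves each directory attribute from its name with getattr. A minimal self-contained sketch of the same pattern (the SketchBuildSpace class and its dirs tuple are illustrative, not the real e3 class):

import os

from e3.fs import mkdir


class SketchBuildSpace:
    """Illustrative stand-in for a build space object."""

    dirs = ('build', 'src', 'install')

    def __init__(self, root):
        # Expose build_dir, src_dir and install_dir attributes
        for d in self.dirs:
            setattr(self, '%s_dir' % d, os.path.join(root, d))

    def create(self, quiet=False):
        for d in self.dirs:
            mkdir(getattr(self, '%s_dir' % d), quiet=quiet)


SketchBuildSpace('bspace').create(quiet=True)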
Example No. 4
def test_open_file_in_dir():
    work_dir = os.getcwd()

    test_dir_path = os.path.join(work_dir, 'dir')
    mkdir(test_dir_path)
    touch(os.path.join(test_dir_path, 'toto.txt'))
    try:
        ntfile = NTFile(test_dir_path)
        ntfile.open()
        ntfile2 = NTFile('toto.txt', parent=ntfile)
        ntfile2.open()
    finally:
        ntfile.close()
        ntfile2.close()
Example No. 5
    def run(self, args):
        sandbox = SandBox()
        sandbox.root_dir = args.sandbox

        sandbox.create_dirs()

        if args.spec_git_url:
            mkdir(sandbox.spec_dir)
            g = GitRepository(sandbox.spec_dir)
            if e3.log.default_output_stream is not None:
                g.log_stream = e3.log.default_output_stream
            g.init()
            g.update(args.spec_git_url, args.spec_git_branch, force=True)

        sandbox.dump_configuration()
        sandbox.write_scripts()
Example No. 6
def test_unlink():
    work_dir = os.getcwd()

    test_dir_path = os.path.join(work_dir, 'dir')
    deleted_file_path = os.path.join(test_dir_path, 'deleted2.txt')
    mkdir(test_dir_path)

    ntfile = NTFile(test_dir_path)
    ntfile3 = NTFile(test_dir_path)
    ntfile2 = NTFile(deleted_file_path)

    try:
        # delete a nonexistent file
        ntfile2.unlink()

        # delete file with readonly attribute
        touch(deleted_file_path)
        ntfile2.read_attributes()
        ntfile2.basic_info.file_attributes.attr |= FileAttribute.READONLY
        ntfile2.write_attributes()
        ntfile2.unlink()

        # delete file already pending deletion
        touch(deleted_file_path)
        ntfile2.open(Access.DELETE, Share.DELETE)
        ntfile2.dispose()
        ntfile2.unlink()

        # delete containing directory
        ntfile.unlink()

        ntfile.close()
        ntfile2.close()

        mkdir(test_dir_path)
        ntfile.open(Access.LIST_DIRECTORY, Share.ALL)
        ntfile3.unlink()

    finally:
        ntfile.close()
        ntfile2.close()
        ntfile3.close()
Example No. 7
File: basic.py Project: AdaCore/gps
    def prepare(self, previous_values, slot):
        testsuite_dir = os.path.join(os.path.dirname(__file__), "..")
        mkdir(self.test_env["working_dir"])
        sync_tree(self.test_env["test_dir"], self.test_env["working_dir"])

        # Create .gnatstudio
        self.gps_home = os.path.join(self.test_env["working_dir"],
                                     ".gnatstudio")
        mkdir(self.gps_home)

        # Populate the .gnatstudio dir
        sync_tree(
            os.path.abspath(os.path.join(testsuite_dir, "gnatstudio_home")),
            self.gps_home,
            delete=False,
        )
        if self.env.options.pycov:
            cp(
                os.path.join(testsuite_dir, "pycov_data", "pycov_startup.xml"),
                os.path.join(self.gps_home, "startup.xml"),
            )
            # Copy the coverage preference
            cp(
                os.path.join(testsuite_dir, "pycov_data", ".coveragerc"),
                self.test_env["working_dir"],
            )
            py_name = ".coverage"
            py_dir = os.path.join(testsuite_dir, "pycov_data")
            mkdir(py_dir)
            self.test_env["pycov"] = os.path.abspath(
                os.path.join(py_dir, py_name))
        else:
            self.test_env["pycov"] = ""
Example No. 8
    def setup_result_dirs(self) -> None:
        """Create the output directory in which the results are stored."""
        assert self.main.args
        args = self.main.args

        # The actual new/old directories to use depend on both the
        # --output-dir and --old-output-dir options.
        d = os.path.abspath(args.output_dir)
        if args.old_output_dir:
            self.output_dir = d
            old_output_dir = os.path.abspath(args.old_output_dir)
        else:
            self.output_dir = os.path.join(d, "new")
            old_output_dir = os.path.join(d, "old")

        # Rotate results directories if requested. In both cases, make sure the
        # new results dir is clean.
        if args.rotate_output_dirs:
            if os.path.isdir(old_output_dir):
                rm(old_output_dir, recursive=True)
            if os.path.isdir(self.output_dir):
                mv(self.output_dir, old_output_dir)
        elif os.path.isdir(self.output_dir):
            rm(self.output_dir, recursive=True)
        mkdir(self.output_dir)

        # Remember about the old output directory only if it exists and does
        # contain results. If not, this info will be unused at best, or lead to
        # incorrect behavior.
        self.old_output_dir = None
        if (os.path.exists(old_output_dir) and os.path.exists(
                os.path.join(old_output_dir, ReportIndex.INDEX_FILENAME))):
            self.old_output_dir = old_output_dir

        if args.dump_environ:
            with open(os.path.join(self.output_dir, "environ.sh"), "w") as f:
                for var_name in sorted(os.environ):
                    f.write("export {}={}\n".format(
                        var_name, quote_arg(os.environ[var_name])))
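The rotation logic above is self-contained enough to exercise in isolation. A minimal sketch using only the e3.fs helpers already seen in these examples (directory names are illustrative):

import os

from e3.fs import mkdir, mv, rm


def rotate_results(output_dir, old_output_dir, rotate=True):
    # Mirrors setup_result_dirs: either promote the new results to the
    # old slot (removing stale old results first), or just wipe the new
    # slot. In both cases the new results directory ends up clean.
    if rotate:
        if os.path.isdir(old_output_dir):
            rm(old_output_dir, recursive=True)
        if os.path.isdir(output_dir):
            mv(output_dir, old_output_dir)
    elif os.path.isdir(output_dir):
        rm(output_dir, recursive=True)
    mkdir(output_dir)


rotate_results('out/new', 'out/old')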
Example No. 9
def xcov_instrument(gprsw, covlevel, extra_args=[], dump_trigger=None,
                    dump_channel=None, gpr_obj_dir=None, out=None, err=None,
                    register_failure=True):
    """
    Run "gnatcov instrument" on a project.

    :param GPRswitches gprsw: Project file command line switches to honor.
    :param None|str covlevel: Coverage level for the instrumentation
        (--level argument). Not passed if None.
    :param list[str] extra_args: Extra arguments to append to the command line.
    :param None|str dump_trigger: Trigger to dump coverage buffers
        (--dump-trigger argument). If left to None,
        use SCOV.instr.default_dump_trigger.
    :param None|str dump_channel: Channel to dump coverage buffers
        (--dump-channel argument). If left to None,
        use SCOV.instr.default_dump_channel.
    :param None|str gpr_obj_dir: Optional name of the directory where gprbuild
        will create build artifacts. If left to None, assume they are produced
        in the current directory.

    See SUITE.tutils.xcov for the other supported options.
    """
    # Create the object directory so that gnatcov does not warn that it
    # does not exist. This is specific to the source trace mode because
    # we run gnatcov before gprbuild.
    if gpr_obj_dir:
        mkdir(gpr_obj_dir)

    covlevel_args = [] if covlevel is None else ['--level', covlevel]
    args = (['instrument'] + covlevel_args +
            ['--dump-trigger', dump_trigger or default_dump_trigger(),
             '--dump-channel', dump_channel or default_dump_channel()] +
            gprsw.cov_switches +
            extra_args)

    if thistest.options.pretty_print:
        args.append('--pretty-print')

    xcov(args, out=out, err=err, register_failure=register_failure)
Example No. 10
def gprbuild(
    driver: TestDriver,
    project_file: Optional[str] = None,
    cwd: Optional[str] = None,
    gcov: bool = False,
) -> bool:
    if project_file is None:
        project_file = os.path.join(driver.test_env["test_dir"], "test.gpr")
    if cwd is None:
        cwd = driver.test_env["working_dir"]
    mkdir(cwd)
    gprbuild_cmd = [
        "gprbuild",
        "--relocate-build-tree",
        "-p",
        "-P",
        project_file,
    ]
    if gcov:
        gprbuild_cmd += ["-largs", "-lgcov"]
    check_call(driver, gprbuild_cmd, cwd=cwd)
    return True
Example No. 11
def run_test(label, slug, main, helper, recursive, projects=[], units=[],
             projects_warned=[], expected_cov_list=[]):
    """
    Produce a coverage report for the given parameters and check the emitted
    warnings.

    :param str label: Label for this test.
    :param str slug: Unique short string for this test (used to create
        directories).
    :param ProjectConfig main: Configuration for the "main" project.
    :param ProjectConfig helper: Configuration for the "helper" project.
    :param bool recursive: Whether to not pass --no-subprojects.
    :param list[str] projects: List of projects to pass with --projects.
    :param list[str] units: List of units to pass with --units.
    :param list[str] projects_warned: List of projects for which we expect
        warnings.
    :param list expected_cov_list: List of expected coverage report dicts;
        they are merged together before checking the generated reports.
    """
    thistest.log('== [{}] {} =='.format(slug, label))
    tmp.to_subdir('wd_/{}'.format(slug))

    expected_output = '\n'.join(
        'warning: project {} provides no unit of interest'
        .format(project) for project in projects_warned)

    # Generate projects for this test (see below for the description of each
    # project).
    ProjectConfig().generate('empty')
    helper.generate('helper')
    main_prj = main.generate('main', deps=['empty', 'helper'],
                             mains=['main.adb'])
    mkdir('obj-empty')
    mkdir('obj-helper')
    mkdir('obj-main')

    # Generate a coverage report for them
    build_run_and_coverage(
        gprsw=GPRswitches(root_project=main_prj,
                          projects=projects,
                          units=units,
                          no_subprojects=not recursive),
        covlevel='stmt',
        mains=['main'],
        gpr_obj_dir='obj-main',
        extra_coverage_args=['-axcov'])

    log_file = ('coverage.log'
                if thistest.options.trace_mode == 'bin' else
                'instrument.log')
    thistest.fail_if_not_equal(
        '[{}/{}] gnatcov output'.format(label, slug),
        expected_output,
        contents_of(log_file).strip())

    expected_cov = {}
    for c in expected_cov_list:
        expected_cov.update(c)
    check_xcov_reports('obj-*/*.xcov', expected_cov)
Example No. 12
    def do_createsource(self) -> None:
        """Prepare src from vcs to cache using sourcebuilders."""
        source_name = self.data.source_name
        assert self.sandbox.tmp_dir is not None
        assert self.sandbox.vcs_dir is not None
        tmp_cache_dir = os.path.join(self.sandbox.tmp_dir, "cache")
        src = self.sandbox.vcs_dir
        src_builder = get_source_builder(self.data.anod_instance,
                                         source_name,
                                         local_sources_only=True)
        if src_builder is not None:
            repo_dict = {}
            src_dir = os.path.join(src, src_builder.checkout[0])
            dest_dir = os.path.join(tmp_cache_dir, source_name)
            # ??? missing repository state
            repo_dict[source_name] = {"working_dir": src_dir}
            mkdir(dest_dir)
            if TYPE_CHECKING:
                assert src_builder.prepare_src is not None
            src_builder.prepare_src(repo_dict, dest_dir)
            self.__status = STATUS.success
            logger.debug("%s created in cache/tmp", source_name)
        return
Example No. 13
    def build(self, prev, slot):
        self.logger = logging.getLogger(f"test.{self.test_env['test_name']}")

        env = {
            "TEST_SOURCES": self.test_source_dir,
            "SUPPORT_SOURCES": self.support_source_dir,
        }

        mkdir(self.build_dir)
        py_files = ls(os.path.join(self.test_source_dir, "*.py"))
        if py_files:
            cp(py_files, self.build_dir)
        check_call(
            self,
            [
                "gprbuild", "-P", self.project_file, "--relocate-build-tree",
                "-p"
            ],
            cwd=self.build_dir,
            timeout=300,
            env=env,
            ignore_environ=False,
        )
Example No. 14
    def create_package(self, anod_instance):
        """Generate a package as a ZIP archive.

        :param anod_instance: the Anod instance that creates the package
        :type anod_instance: Anod
        :return: the full path to the generated archive
        :rtype: str
        """
        pkg_name = self.pkg_name(anod_instance)
        pkg_path = self.pkg_path(anod_instance)

        # Reset binary dir
        rm(anod_instance.build_space.binary_dir, True)
        mkdir(anod_instance.build_space.binary_dir)

        # Create the zip archive
        create_archive(
            filename=os.path.basename(pkg_path),
            from_dir=anod_instance.build_space.pkg_dir,
            dest=os.path.dirname(pkg_path),
            from_dir_rename=pkg_name,
        )
        return pkg_path
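Outside of Anod, the archive step can be reproduced directly with e3.archive.create_archive. A small hedged sketch (paths and names are illustrative):

import os

from e3.archive import create_archive
from e3.fs import mkdir
from e3.os.fs import touch

mkdir('pkg/content')
touch('pkg/content/readme.txt')
mkdir('dist')
# The archive format is deduced from the file extension; from_dir_rename
# controls the name of the root directory inside the archive.
create_archive(filename='my-pkg-1.0.zip',
               from_dir=os.path.abspath('pkg'),
               dest=os.path.abspath('dist'),
               from_dir_rename='my-pkg-1.0')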
Example No. 15
    def build_kit(self):
        announce("building %s kit" % self.this_docformat)

        os.chdir(self.workdir)

        # The kit name is computed as:
        #
        #    gnatcov-qualkit-<kitid>-<YYYYMMDD>
        #
        # where <YYYYMMDD> is the kit production stamp (now), and <kitid> is
        # computed from the git branch off which the artifacts are taken. The
        # git branch name might contain the "qualkit" indication already.

        today = date.today()
        gitbranch = current_gitbranch_at(self.repodir)

        kitprefix = ("gnatcov-qualkit"
                     if "qualkit" not in gitbranch else "gnatcov")

        kitid = gitbranch
        kitid = kitid.replace('/', '-')
        kitid = kitid.replace('.', '_')

        # If we are re-constructing a kit with some parts just rebuilt, target
        # the specified version (stamp) and arrange to keep the old elements
        # in place:

        kitstamp = (self.o.rekit if self.o.rekit else "%4d%02d%02d" %
                    (today.year, today.month, today.day))
        kitname = "%s-%s-%s" % (kitprefix, kitid, kitstamp)
        kitdir = "%s-%s" % (kitname, self.this_docformat)

        mkdir(kitdir)

        for part in self.o.parts:
            self.__relocate_into(dir=kitdir, part=part)

        run("zip -q -r %(kitdir)s.zip %(kitdir)s" % {"kitdir": kitdir})
Example No. 16
    def install(self):
        os.chdir(self.root_dir)

        for d in (("/bin", 0o755),
                  ("/dev", 0o755),
                  ("/dev/mqueue", 0o1777),
                  ("/dev/shm", 0o1777),
                  ("/etc", 0o755),
                  ("/etc/fstab.d", 0o1777),
                  ("/lib", 0o755),
                  ("/tmp", 0o1777),
                  ("/usr", 0o755),
                  ("/usr/bin", 0o755),
                  ("/usr/lib", 0o755),
                  ("/usr/local", 0o755),
                  ("/usr/local/bin", 0o755),
                  ("/usr/local/etc", 0o755),
                  ("/usr/local/lib", 0o755),
                  ("/usr/src", 0o755),
                  ("/usr/tmp", 0o1777),
                  ("/var", 0o755),
                  ("/var/log", 0o1777),
                  ("/var/run", 0o1777),
                  ("/var/tmp", 0o1777),
                  ("/etc/setup", 0o755)):
            mkdir(self.root_dir + d[0])
            self.cygwin_chmod(self.root_dir + d[0], d[1])
        # Ensure /var/empty is created. When updating an installation,
        # the /var/empty permissions might have been tightened by the
        # cyg_server account (sshd) and thus chmod might fail. Just
        # ignore the error.
        try:
            mkdir(self.root_dir + '/var/empty')
            self.cygwin_chmod(self.root_dir + '/var/empty', 0o755)
        except Exception:
            pass

        for d in (('c:/home', 0o1777),
                  ('c:/tmp', 0o1777),
                  ('c:/tmp/trash', 0o1777)):
            mkdir(d[0])
            self.cygwin_chmod(d[0], d[1])

        package_number = len(self.to_be_removed)
        for index, p in enumerate(self.to_be_removed):
            logging.info('Removing package (%s/%s): %s' %
                         (index + 1, package_number, p))
            self.uninstall_package(p)

        package_number = len(self.to_be_installed)
        for index, p in enumerate(self.to_be_installed):
            logging.info('Installing package (%s/%s): %s' %
                         (index + 1, package_number, p))
            self.install_package(p)
        self.run_postinstall()
        self.db.dump()
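The modes passed to cygwin_chmod are octal: 0o755 is rwxr-xr-x, while 0o1777 is rwxrwxrwx plus the sticky bit (for world-writable directories such as /tmp). A quick sanity check with the standard stat module:

import stat

mode = 0o1777
assert mode & stat.S_ISVTX            # sticky bit is set
assert stat.S_IMODE(mode) == 0o1777   # permission bits round-trip
print(oct(0o755), oct(0o1777))        # 0o755 0o1777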
Example No. 17
def test_rsync_mode():
    """Check that rsync mode is faster than default mode."""
    mkdir("work")
    mkdir("work2")
    GitRepository.create("git")
    for _ in range(1000):
        name = str(uuid.uuid1(clock_seq=int(1000 * time.time())))
        touch(os.path.join("git", name + ".py"))
        touch(os.path.join("git", name + ".pyc"))
        touch(os.path.join("git", name + ".o"))
        touch(os.path.join("git", name + ".ali"))

    with open("git/.gitignore", "w") as fd:
        fd.write("*.pyc\n")
        fd.write("*.o\n")
        fd.write("*.ali\n")

    m = CheckoutManager(name="myrepo", working_dir="work")
    m.update(vcs="external", url=os.path.abspath("git"))

    os.environ["E3_ENABLE_FEATURE"] = "use-rsync"

    m = CheckoutManager(name="myrepo", working_dir="work2")
    m.update(vcs="external", url=os.path.abspath("git"))
Example No. 18
def make_gnatcoll_for_gcov(work_dir):
    """Build gnatcoll core with gcov instrumentation.

    :param work_dir: working directory. gnatcoll is built in `build` subdir
        and installed in `install` subdir
    :type work_dir: str
    :return: a triplet (project path, source path, object path)
    :rtype: (str, str, str)
    :raise AssertionError: in case compilation or installation fails
    """
    logging.info('Compiling gnatcoll with gcov instrumentation')
    build_dir = os.path.join(work_dir, 'build')
    install_dir = os.path.join(work_dir, 'install')
    mkdir(build_dir)
    mkdir(install_dir)

    make_gnatcoll_cmd = [
        'make', '-f',
        os.path.join(GNATCOLL_ROOT_DIR, 'Makefile'), 'BUILD=DEBUG',
        'GPRBUILD_OPTIONS=-cargs -fprofile-arcs -ftest-coverage -gargs',
        'ENABLE_SHARED=no'
    ]

    p = Run(make_gnatcoll_cmd, cwd=build_dir)
    assert p.status == 0, "gnatcoll build failed:\n%s" % p.out

    p = Run(make_gnatcoll_cmd + ['prefix=%s' % install_dir, 'install'],
            cwd=build_dir)
    assert p.status == 0, "gnatcoll installation failed:\n%s" % p.out

    # Add the resulting library into the GPR path
    Env().add_search_path('GPR_PROJECT_PATH',
                          os.path.join(install_dir, 'share', 'gpr'))
    return (os.path.join(install_dir, 'share', 'gpr'),
            os.path.join(install_dir, 'include', 'gnatcoll'),
            os.path.join(build_dir, 'obj', 'gnatcoll', 'static'))
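A hedged usage sketch for the helper above; it assumes that GNATCOLL_ROOT_DIR points at a GNATcoll checkout and that a GNAT toolchain is on PATH, so treat it as illustrative rather than runnable anywhere:

import tempfile

# Build in a throwaway directory; the helper returns the project,
# source and object paths of the gcov-instrumented build.
work_dir = tempfile.mkdtemp(prefix='gnatcoll-gcov-')
gpr_path, src_path, obj_path = make_gnatcoll_for_gcov(work_dir)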
Example No. 19
def make_gnatcoll(work_dir, gcov=False):
    """Build gnatcoll core with or without gcov instrumentation.

    :param work_dir: working directory. gnatcoll is built in `build` subdir
        and installed in `install` subdir
    :type work_dir: str
    :param gcov: if False then build gnatcoll in PROD mode, otherwise
        build it with gcov instrumentation in DEBUG mode
    :type gcov: bool
    :return: a triplet (project path, source path, object path)
    :rtype: (str, str, str)
    :raise AssertionError: in case compilation or installation fails
    """

    # Create build tree structure
    build_dir = os.path.join(work_dir, 'build')
    install_dir = os.path.join(work_dir, 'install')
    mkdir(build_dir)
    mkdir(install_dir)

    # Compute make invocation
    for binding in ('gmp', 'iconv', 'python'):
        logging.info('Compiling gnatcoll %s (gcov=%s)', binding, gcov)
        setup = os.path.join(GNATCOLL_ROOT_DIR, binding, 'setup.py')
        obj_dir = os.path.join(build_dir, binding)
        mkdir(obj_dir)

        build_cmd = [sys.executable, setup, 'build', '--disable-shared']
        install_cmd = [sys.executable, setup, 'install',
                       '--prefix', install_dir]

        if gcov:
            build_cmd += ['--gpr-opts', '-cargs', '-fprofile-arcs',
                          '-ftest-coverage',
                          '-largs', '-lgcov',
                          '-gargs', '-XBUILD=DEBUG']
        else:
            build_cmd += ['--gpr-opts', '-XBUILD=PROD']

        # Build & Install
        p = Run(build_cmd, cwd=obj_dir)
        assert p.status == 0, \
            "gnatcoll %s build failed:\n%s" % (binding, p.out)
        logging.debug('build:\n%s', p.out)

        p = Run(install_cmd, cwd=obj_dir)
        assert p.status == 0, \
            "gnatcoll %s installation failed:\n%s" % (binding, p.out)
        logging.debug('install:\n%s', p.out)

    return (os.path.join(install_dir, 'share', 'gpr'),
            os.path.join(install_dir, 'include'),
            build_dir)
Example No. 20
    def testsuite_main(self, args: Optional[List[str]] = None) -> int:
        """Main for the main testsuite script.

        :param args: Command line arguments. If None, use `sys.argv`.
        :return: The testsuite status code (0 for success, a positive value
            for failure).
        """
        self.main = Main(platform_args=True)

        # Add common options
        parser = self.main.argument_parser

        temp_group = parser.add_argument_group(
            title="temporaries handling arguments")
        temp_group.add_argument("-t",
                                "--temp-dir",
                                metavar="DIR",
                                default=Env().tmp_dir)
        temp_group.add_argument(
            "--no-random-temp-subdir",
            dest="random_temp_subdir",
            action="store_false",
            help="Disable the creation of a random subdirectory in the"
            " temporary directory. Use this when you know that you have"
            " exclusive access to the temporary directory (needed in order to"
            " avoid name clashes there) to get a deterministic path for"
            " testsuite temporaries.")
        temp_group.add_argument(
            "-d",
            "--dev-temp",
            metavar="DIR",
            nargs="?",
            default=None,
            const="tmp",
            help="Convenience shortcut for dev setups: forces `-t DIR"
            " --no-random-temp-subdir --cleanup-mode=none` and cleans up `DIR`"
            ' first. If no directory is provided, use the local "tmp"'
            " directory.")

        cleanup_mode_map = enum_to_cmdline_args_map(CleanupMode)
        temp_group.add_argument(
            "--cleanup-mode",
            choices=list(cleanup_mode_map),
            help="Control the cleanup of working spaces.\n" +
            "\n".join(f"{name}: {CleanupMode.descriptions()[value]}"
                      for name, value in cleanup_mode_map.items()))
        temp_group.add_argument(
            "--disable-cleanup",
            action="store_true",
            help="Disable cleanup of working spaces. This option is deprecated"
            " and will disappear in a future version of e3-testsuite. Please"
            " use --cleanup-mode instead.")

        output_group = parser.add_argument_group(
            title="results output arguments")
        output_group.add_argument(
            "-o",
            "--output-dir",
            metavar="DIR",
            default="./out",
            help="Select the output directory, where test results are to be"
            " stored (default: './out'). If --old-output-dir=DIR2 is passed,"
            " the new results are stored in DIR while DIR2 contains results"
            " from a previous run. Otherwise, the new results are stored in"
            " DIR/new/ while the old ones are stored in DIR/old. In both"
            " cases, the testsuite cleans the directory for new results"
            " first.",
        )
        output_group.add_argument(
            "--old-output-dir",
            metavar="DIR",
            help="Select the old output directory, for baseline comparison."
            " See --output-dir.",
        )
        output_group.add_argument(
            "--rotate-output-dirs",
            default=False,
            action="store_true",
            help="Rotate testsuite results: move the new results directory to"
            " the old results one before running testcases (this removes the"
            " old results directory first). If not passed, we just remove the"
            " new results directory before running testcases (i.e. just ignore"
            " the old results directory).",
        )
        output_group.add_argument(
            "--show-error-output",
            "-E",
            action="store_true",
            help="When testcases fail, display their output. This is for"
            " convenience for interactive use.",
        )
        output_group.add_argument(
            "--show-time-info",
            action="store_true",
            help="Display time information for test results, if available",
        )
        output_group.add_argument(
            "--xunit-output",
            dest="xunit_output",
            metavar="FILE",
            help="Output testsuite report to the given file in the standard"
            " XUnit XML format. This is useful to display results in"
            " continuous build systems such as Jenkins.",
        )
        output_group.add_argument(
            "--gaia-output",
            action="store_true",
            help="Output a GAIA-compatible testsuite report next to the YAML"
            " report.",
        )
        output_group.add_argument(
            "--status-update-interval",
            default=1.0,
            type=float,
            help="Minimum number of seconds between status file updates. The"
            " more often we update this file, the more often one will read"
            " garbage.")

        auto_gen_default = ("enabled"
                            if self.auto_generate_text_report else "disabled")
        output_group.add_argument(
            "--generate-text-report",
            action="store_true",
            dest="generate_text_report",
            default=self.auto_generate_text_report,
            help=(
                f"When the testsuite completes, generate a 'report' text file"
                f" in the output directory ({auto_gen_default} by default)."),
        )
        output_group.add_argument(
            "--no-generate-text-report",
            action="store_false",
            dest="generate_text_report",
            help="Disable the generation of a 'report' text file (see"
            "--generate-text-report).",
        )

        output_group.add_argument(
            "--truncate-logs",
            "-T",
            metavar="N",
            type=int,
            default=200,
            help="When outputs (for instance subprocess outputs) exceed 2*N"
            " lines, only include the first and last N lines in logs. This is"
            " necessary when storage for testsuite results have size limits,"
            " and the useful information is generally either at the beginning"
            " or the end of such outputs. If 0, never truncate logs.",
        )
        output_group.add_argument(
            "--dump-environ",
            dest="dump_environ",
            action="store_true",
            default=False,
            help="Dump all environment variables in a file named environ.sh,"
            " located in the output directory (see --output-dir). This"
            " file can then be sourced from a Bourne shell to recreate"
            " the environement that existed when this testsuite was run"
            " to produce a given testsuite report.",
        )

        exec_group = parser.add_argument_group(
            title="execution control arguments")
        exec_group.add_argument(
            "--max-consecutive-failures",
            "-M",
            metavar="N",
            type=int,
            default=self.default_max_consecutive_failures,
            help="Number of test failures (FAIL or ERROR) that trigger the"
            " abortion of the testuite. If zero, this behavior is disabled. In"
            " some cases, aborting the testsuite when there are just too many"
            " failures saves time and costs: the software to test/environment"
            " is too broken, there is no point to continue running the"
            " testsuite.",
        )
        exec_group.add_argument(
            "-j",
            "--jobs",
            dest="jobs",
            type=int,
            metavar="N",
            default=Env().build.cpu.cores,
            help="Specify the number of jobs to run simultaneously",
        )
        exec_group.add_argument(
            "--failure-exit-code",
            metavar="N",
            type=int,
            default=self.default_failure_exit_code,
            help="Exit code the testsuite must use when at least one test"
            " result shows a failure/error. By default, this is"
            f" {self.default_failure_exit_code}. This option is useful when"
            " running a testsuite in a continuous integration setup, as this"
            " can make the testing process stop when there is a regression.",
        )
        exec_group.add_argument(
            "--force-multiprocessing",
            action="store_true",
            help="Force the use of subprocesses to execute tests, for"
            " debugging purposes. This is normally automatically enabled when"
            " both the level of requested parallelism is high enough (to make"
            " it profitable regarding the contention of Python's GIL) and no"
            " test fragment has dependencies on other fragments. This flag"
            " forces the use of multiprocessing even if any of these two"
            " conditions is false.")
        parser.add_argument("sublist",
                            metavar="tests",
                            nargs="*",
                            default=[],
                            help="test")
        # Add user defined options
        self.add_options(parser)

        # Parse options
        self.main.parse_args(args)
        assert self.main.args is not None

        # If there is a chance for the logging to end up in a non-tty stream,
        # disable colors. If not, be user-friendly and automatically show error
        # outputs.
        if (self.main.args.log_file or not isatty(sys.stdout)
                or not isatty(sys.stderr)):
            enable_colors = False
        else:  # interactive-only
            enable_colors = True
            self.main.args.show_error_output = True
        self.colors = ColorConfig(enable_colors)
        self.Fore = self.colors.Fore
        self.Style = self.colors.Style

        self.env = Env()
        self.env.enable_colors = enable_colors
        self.env.root_dir = self.root_dir
        self.env.test_dir = self.test_dir

        # Setup output directories and create an index for the results we are
        # going to produce.
        self.output_dir: str
        self.old_output_dir: Optional[str]
        self.setup_result_dirs()
        self.report_index = ReportIndex(self.output_dir)

        # Set the cleanup mode from command-line arguments
        if self.main.args.cleanup_mode is not None:
            self.env.cleanup_mode = (
                cleanup_mode_map[self.main.args.cleanup_mode])
        elif self.main.args.disable_cleanup:
            logger.warning(
                "--disable-cleanup is deprecated and will disappear in a"
                " future version of e3-testsuite. Please use --cleanup-mode"
                " instead.")
            self.env.cleanup_mode = CleanupMode.NONE
        else:
            self.env.cleanup_mode = CleanupMode.default()

        # Settings for temporary directory creation
        temp_dir: str = self.main.args.temp_dir
        random_temp_subdir: bool = self.main.args.random_temp_subdir

        # The "--dev-temp" option forces several settings
        if self.main.args.dev_temp:
            self.env.cleanup_mode = CleanupMode.NONE
            temp_dir = self.main.args.dev_temp
            random_temp_subdir = False

        # Now actually set up the temporary directory: make sure we start from a
        # clean directory if we use a deterministic directory.
        #
        # Note that we do make sure that working_dir is an absolute path, as we
        # are likely to be changing directories when running each test. A
        # relative path would no longer work under those circumstances.
        temp_dir = os.path.abspath(temp_dir)
        if not random_temp_subdir:
            self.working_dir = temp_dir
            rm(self.working_dir, recursive=True)
            mkdir(self.working_dir)

        elif not os.path.isdir(temp_dir):
            # If the temp dir is supposed to be randomized, we need to create a
            # subdirectory, so check that the parent directory exists first.
            logger.critical("temp dir '%s' does not exist", temp_dir)
            return 1

        else:
            self.working_dir = tempfile.mkdtemp("", "tmp", temp_dir)

        # Create the exchange directory (to exchange data between the testsuite
        # main and the subprocesses running test fragments). Compute the name
        # of the file to pass environment data to subprocesses.
        self.exchange_dir = os.path.join(self.working_dir, "exchange")
        self.env_filename = os.path.join(self.exchange_dir, "_env.bin")
        mkdir(self.exchange_dir)

        # Make them both available to test fragments
        self.env.exchange_dir = self.exchange_dir
        self.env.env_filename = self.env_filename

        self.gaia_result_files: Dict[str, GAIAResultFiles] = {}
        """Mapping from test names to files for results in the GAIA report."""

        # Store in global env: target information and common paths
        self.env.output_dir = self.output_dir
        self.env.working_dir = self.working_dir
        self.env.options = self.main.args

        # Create an object to report testsuite execution status to users
        from e3.testsuite.running_status import RunningStatus
        self.running_status = RunningStatus(
            os.path.join(self.output_dir, "status"),
            self.main.args.status_update_interval,
        )

        # User specific startup
        self.set_up()

        # Retrieve the list of tests
        self.test_list = self.get_test_list(self.main.args.sublist)

        # Create a DAG to constrain the test execution order
        dag = DAG()
        for parsed_test in self.test_list:
            self.add_test(dag, parsed_test)
        self.adjust_dag_dependencies(dag)
        dag.check()
        self.running_status.set_dag(dag)

        # Determine whether to use multiple processes for fragment execution
        # parallelism.
        self.use_multiprocessing = self.compute_use_multiprocessing()
        self.env.use_multiprocessing = self.use_multiprocessing

        # Record modules lookup path, including for the file corresponding to
        # the __main__ module.  Subprocesses will need it to have access to the
        # same modules.
        main_module = sys.modules["__main__"]
        self.env.modules_search_path = [
            os.path.dirname(os.path.abspath(main_module.__file__))
        ] + sys.path

        # Now that the env is supposed to be complete, dump it for the test
        # fragments to pick it up.
        self.env.store(self.env_filename)

        # For debugging purposes, dump the final DAG to a DOT file
        with open(os.path.join(self.output_dir, "tests.dot"), "w") as fd:
            fd.write(dag.as_dot())

        if self.use_multiprocessing:
            self.run_multiprocess_mainloop(dag)
        else:
            self.run_standard_mainloop(dag)

        self.report_index.write()
        self.dump_testsuite_result()
        if self.main.args.xunit_output:
            dump_xunit_report(self, self.main.args.xunit_output)
        if self.main.args.gaia_output:
            dump_gaia_report(self, self.output_dir, self.gaia_result_files)

        # Clean everything
        self.tear_down()

        # If requested, generate a text report
        if self.main.args.generate_text_report:
            # Use the previous testsuite results for comparison, if available
            old_index = (ReportIndex.read(self.old_output_dir)
                         if self.old_output_dir else None)

            # Include all information, except logs for successful tests, which
            # is just too verbose.
            with open(os.path.join(self.output_dir, "report"),
                      "w",
                      encoding="utf-8") as f:
                generate_report(
                    output_file=f,
                    new_index=self.report_index,
                    old_index=old_index,
                    colors=ColorConfig(colors_enabled=False),
                    show_all_logs=False,
                    show_xfail_logs=True,
                    show_error_output=True,
                    show_time_info=True,
                )

        # Return the appropriate status code: 1 when there is a framework
        # issue, the failure status code from the --failure-exit-code=N option
        # when there is at least one testcase failure, or 0.
        statuses = {
            s
            for s, count in self.report_index.status_counters.items() if count
        }
        if TestStatus.FAIL in statuses or TestStatus.ERROR in statuses:
            return self.main.args.failure_exit_code
        else:
            return 0
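testsuite_main is normally called from a thin entry-point script. A minimal sketch, assuming the e3.testsuite API (the driver, its mapping name, and the tests subdirectory are illustrative):

import sys

from e3.testsuite import Testsuite
from e3.testsuite.driver.classic import ClassicTestDriver


class EchoDriver(ClassicTestDriver):
    """Trivial driver: the test passes if "echo" exits successfully."""

    def run(self):
        self.shell(['echo', self.test_env['test_name']])


class MySuite(Testsuite):
    tests_subdir = 'tests'
    test_driver_map = {'echo': EchoDriver}
    default_driver = 'echo'


if __name__ == '__main__':
    sys.exit(MySuite().testsuite_main())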
Example No. 21
    def run(self, args):
        sandbox = SandBox()
        sandbox.root_dir = args.sandbox

        if args.specs_dir:
            sandbox.specs_dir = args.specs_dir

        if args.create_sandbox:
            sandbox.create_dirs()

        if args.create_sandbox and args.spec_git_url:
            mkdir(sandbox.specs_dir)
            g = GitRepository(sandbox.specs_dir)
            if e3.log.default_output_stream is not None:
                g.log_stream = e3.log.default_output_stream
            g.init()
            g.update(args.spec_git_url, args.spec_git_branch, force=True)

        sandbox.dump_configuration()
        sandbox.write_scripts()

        asr = AnodSpecRepository(sandbox.specs_dir)
        check_api_version(asr.api_version)

        # Load plan content if needed
        if args.plan:
            if not os.path.isfile(args.plan):
                raise SandBoxError("plan file %s does not exist" % args.plan,
                                   origin="SandBoxExec.run")
            with open(args.plan, "r") as plan_fd:
                plan_content = ["def main_entry_point():"]
                plan_content += [
                    "    %s" % line for line in plan_fd.read().splitlines()
                ]
                plan_content = "\n".join(plan_content)

            env = BaseEnv()
            cm = PlanContext(server=env)
            store = None
            resolver = getattr(
                AnodContext,
                str(args.resolver),
                AnodContext.always_create_source_resolver,
            )
            logger.debug("Using resolver %s", resolver.__name__)

            # Declare available actions and their signature
            def anod_action(module,
                            build=None,
                            host=None,
                            target=None,
                            qualifier=None):
                pass  # all: no cover

            for a in ("anod_install", "anod_build", "anod_test"):
                cm.register_action(a, anod_action)

            # Load the plan and execute
            plan = Plan(data={})
            plan.load_chunk(plan_content)
            actions = cm.execute(plan, "main_entry_point")

            ac = AnodContext(asr, default_env=env)
            for action in actions:
                ac.add_anod_action(
                    action.module,
                    action,
                    action.action.replace("anod_", "", 1),
                    action.qualifier,
                )

            # Check if machine plan is locally schedulable
            action_list = ac.schedule(resolver)
            e = ElectrolytJobFactory(sandbox, asr, store, dry_run=args.dry_run)
            e.run(action_list)
Example No. 22
def gprbuild(driver,
             project_file=None,
             cwd=None,
             gcov=False,
             scenario=None,
             gpr_project_path=None,
             timeout=DEFAULT_TIMEOUT,
             **kwargs):
    """Launch gprbuild.

    :param project_file: project file to compile. If None, we look first for
        a test.gpr in the test dir and otherwise fall back on the common
        test.gpr project in the support subdir of the testsuite.
    :type project_file: str
    :param cwd: directory in which to run gprbuild. If None the gprbuild build
        is run in the default working dir for the test.
    :type cwd: str | None
    :param gcov: if True link with gcov libraries
    :type gcov: bool
    :param scenario: scenario variable values
    :type scenario: dict
    :param gpr_project_path: if not None, prepend this value to GPR_PROJECT_PATH
    :type gpr_project_path: None | str
    :param kwargs: additional keyword arguments are passed to the
        e3.testsuite.process.check_call function
    :return: True on successful completion
    :rtype: bool
    """
    if scenario is None:
        scenario = {}

    if project_file is None:
        project_file = os.path.join(driver.test_env['test_dir'], 'test.gpr')
        if not os.path.isfile(project_file):
            project_file = os.path.join(TESTSUITE_ROOT_DIR, 'support',
                                        'test.gpr')
            scenario['TEST_SOURCES'] = driver.test_env['test_dir']

    if cwd is None:
        cwd = driver.test_env['working_dir']
    mkdir(cwd)
    gprbuild_cmd = [
        'gprbuild', '--relocate-build-tree', '-p', '-P', project_file
    ]
    for k, v in scenario.items():
        gprbuild_cmd.append('-X%s=%s' % (k, v))
    if gcov:
        gprbuild_cmd += [
            '-largs', '-lgcov', '-cargs', '-fprofile-arcs', '-ftest-coverage',
            '-g'
        ]
    elif driver.env.gnatcov:
        # TODO: GNATcoverage relies on debug info to do its magic. It needs
        # consistent paths to source files in the debug info, so do not build
        # tests with debug info, as they will reference installed sources
        # (while GNATCOLL objects reference original sources).
        gprbuild_cmd += ['-g0']

    # Adjust process environment
    env = None
    if gpr_project_path:
        new_gpr_path = gpr_project_path
        if 'GPR_PROJECT_PATH' in os.environ:
            new_gpr_path += os.path.pathsep + os.environ['GPR_PROJECT_PATH']
        env = {'GPR_PROJECT_PATH': new_gpr_path}

    check_call(driver,
               gprbuild_cmd,
               cwd=cwd,
               env=env,
               ignore_environ=False,
               timeout=timeout,
               **kwargs)
    # If we get there it means the build succeeded.
    return True
Example No. 23
def make_gnatcoll(work_dir, debug=False, gcov=False, gnatcov=False):
    """Build gnatcoll core with or without gcov instrumentation.

    :param str work_dir: Working directory. GNATcoll is built in `build` subdir
        and installed in `install` subdir.

    :param bool debug: Whether to build GNATCOLL in debug mode. Otherwise, use
        the prod mode. Note that gcov and gnatcov modes automatically enable
        debug mode.

    :param bool gcov: If True, build GNATCOLL with gcov instrumentation in
        debug mode.

    :param bool gnatcov: If True, build GNATCOLL with the compile options that
        GNATcoverage requires, in debug mode.

    :return: A triplet (project path, source path, object path).
    :rtype: (str, str, str)
    :raise AssertionError: In case compilation or installation fails.
    """
    assert not (gcov and gnatcov)

    if gcov:
        tag = ' (gcov)'
    elif gnatcov:
        tag = ' (gnatcov)'
    else:
        tag = ''
    logging.info('Compiling gnatcoll{}'.format(tag))

    # Create build tree structure
    build_dir = os.path.join(work_dir, 'build')
    install_dir = os.path.join(work_dir, 'install')
    mkdir(build_dir)
    mkdir(install_dir)

    # Compute make invocation
    make_gnatcoll_cmd = [
        'make', '-f',
        os.path.join(GNATCOLL_ROOT_DIR, 'Makefile'), 'ENABLE_SHARED=no',
        'BUILD={}'.format('DEBUG' if debug or gcov or gnatcov else 'PROD')
    ]
    if gcov:
        make_gnatcoll_cmd += [
            'GPRBUILD_OPTIONS=-cargs -fprofile-arcs -ftest-coverage -gargs'
        ]
    elif gnatcov:
        make_gnatcoll_cmd += [
            'GPRBUILD_OPTIONS=-cargs -fdump-scos -fpreserve-control-flow'
            ' -gargs'
        ]

    # Build & Install
    p = Run(make_gnatcoll_cmd, cwd=build_dir, timeout=DEFAULT_TIMEOUT)
    assert p.status == 0, "gnatcoll build failed:\n%s" % p.out

    p = Run(make_gnatcoll_cmd + ['prefix=%s' % install_dir, 'install'],
            cwd=build_dir,
            timeout=DEFAULT_TIMEOUT)
    assert p.status == 0, "gnatcoll installation failed:\n%s" % p.out

    return (os.path.join(install_dir, 'share', 'gpr'),
            os.path.join(install_dir, 'include', 'gnatcoll'),
            os.path.join(build_dir, 'obj', 'gnatcoll', 'static'))
Example No. 24
    def _create_cache_dir(self):
        mkdir(self.cache_dir)
Example No. 25
    def update(
        self,
        url: Optional[str] = None,
        revision: Optional[str] = None,
        force_and_clean: bool = False,
    ) -> bool:
        """Update a working copy or checkout a new one.

        If the directory is already a checkout, it tries to update it.
        If the directory is not associated to a (good) checkout or is empty
        it will checkout.
        The option --remove-unversioned of the svn subcommand
        cleanup exists only from svn version 1.9.
        :param url: URL of a SVN repository
        :param revision: specific revision (default is last)
        :param force_and_clean: if True: erase the content of non empty
        working_copy and use '--force' option for the svn update/checkout
        command
        :return: True if any local changes detected in the working copy
        :raise: SVNError
        """
        def is_clean_svn_dir(dir_path: str) -> Tuple[bool, bool]:
            """Return a tuple (True if dir is SVN directory, True if clean)."""
            if os.path.exists(os.path.join(dir_path, ".svn")):
                try:
                    status = self.svn_cmd(["status"], output=PIPE).out.strip()
                except SVNError:  # defensive code
                    return False, False
                if "warning: W" in status:
                    return False, False
                return True, status == ""
            return False, False

        def is_empty_dir(dir_path: str) -> bool:
            """Return True if the path is a directory and is empty."""
            return os.path.isdir(dir_path) and not os.listdir(dir_path)

        options: SVNCmd = ["--ignore-externals"]
        if revision:
            options += ["-r", revision]
        if force_and_clean:
            options += ["--force"]

        is_svn_dir, is_clean = is_clean_svn_dir(self.working_copy)
        if (is_svn_dir and (is_clean or not force_and_clean)
                and (not url or self.url == url)):
            update_cmd: SVNCmd = ["update"]
            self.svn_cmd(update_cmd + options)
            return not is_clean
        if os.path.exists(self.working_copy):
            if not is_empty_dir(self.working_copy) and not force_and_clean:
                raise SVNError(
                    f"not empty {self.working_copy} url {url}",
                    origin="update",
                )
            if is_svn_dir and not url:
                url = self.url
            rm(self.working_copy, recursive=True)

        mkdir(self.working_copy)
        checkout_cmd: SVNCmd = ["checkout", url, "."]
        self.svn_cmd(checkout_cmd + options)
        return not is_clean
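In practice the method is driven through an SVNRepository instance, as the fuller test below shows; a minimal hedged sketch with an illustrative repository URL (it needs an svn client on PATH):

from e3.vcs.svn import SVNRepository

repo = SVNRepository('working_copy')
# The first call checks out; subsequent calls update and report
# whether local changes were detected.
local_change = repo.update(url='file:///srv/repos/Test_Project')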
Example No. 26
def make_gsh(work_dir, gcov=False, recompile=True):
    """Build gsh with or without gcov instrumentation.

    :param work_dir: working directory. gsh is built in `build` subdir
        and installed in `install` subdir
    :type work_dir: str
    :param gcov: if False then build gsh in PROD mode, otherwise
        build it with gcov instrumentation in DEBUG mode
    :type gcov: bool
    :return: a triplet (project path, source path, object path)
    :rtype: (str, str, str)
    :raise AssertionError: in case compilation or installation fails
    """
    logging.info('Compiling projects (gcov=%s)' % gcov)

    # Create build tree structure
    build_dir = os.path.join(work_dir, 'build')
    install_dir = os.path.join(work_dir, 'install')
    os.environ['GPR_PROJECT_PATH'] = os.path.join(install_dir, 'share', 'gpr')
    mkdir(build_dir)
    mkdir(install_dir)

    # Compute make invocation
    gprbuild_opts = []
    if gcov:
        gprbuild_opts = ['-cargs', '-fprofile-arcs', '-ftest-coverage',
                         '-largs', '-lgcov',
                         '-gargs']

    if recompile:
        for project in ('c', 'os', 'gsh'):
            logging.info('Compiling project %s', project)
            obj_dir = os.path.join(build_dir, project)
            mkdir(obj_dir)

            # Build & Install
            p = Run(['gprbuild', '--relocate-build-tree', '-p', '-P',
                     os.path.join(GSH_ROOT_DIR, project,
                                  '%s.gpr' % project)] + gprbuild_opts,
                    cwd=obj_dir, timeout=DEFAULT_TIMEOUT,
                    output=None)

            assert p.status == 0, \
                "%s installation failed:\n%s" % (project, p.out)

            p = Run(['gprinstall', '-p', '-f', '--prefix=%s' % install_dir,
                     '--relocate-build-tree',
                     '-P', os.path.join(GSH_ROOT_DIR, project,
                                        '%s.gpr' % project)],
                    cwd=obj_dir, timeout=DEFAULT_TIMEOUT)
            assert p.status == 0, \
                "%s installation failed:\n%s" % (project, p.out)

        # Build & Install
        p = Run(['gprbuild', '--relocate-build-tree', '-p', '-P',
                 os.path.join(GSH_ROOT_DIR,
                              project, '%s.gpr' % project)] +
                gprbuild_opts,
                cwd=obj_dir, timeout=DEFAULT_TIMEOUT,
                output=None)

        assert p.status == 0, "%s installation failed:\n%s" % (project, p.out)

        p = Run(['gprinstall', '-p', '-f', '--prefix=%s' % install_dir,
                 '--relocate-build-tree',
                 '-P', os.path.join(GSH_ROOT_DIR,
                                    project, '%s.gpr' % project)],
                cwd=obj_dir, timeout=DEFAULT_TIMEOUT)
        assert p.status == 0, "%s installation failed:\n%s" % (project, p.out)

        # Build & Install
        p = Run(['gprbuild', '--relocate-build-tree', '-p', '-P',
                 os.path.join(GSH_ROOT_DIR, 'posix_shell.gpr')] +
                gprbuild_opts,
                cwd=obj_dir, timeout=DEFAULT_TIMEOUT,
                output=None)

        assert p.status == 0, "mains installation failed:\n%s" % p.out

        p = Run(['gprinstall', '-p', '-f', '--prefix=%s' % install_dir,
                 '--relocate-build-tree',
                 '-P', os.path.join(GSH_ROOT_DIR, 'posix_shell.gpr')],
                cwd=obj_dir, timeout=DEFAULT_TIMEOUT)
        assert p.status == 0, "mains installation failed:\n%s" % p.out

    return (os.path.join(install_dir, 'share', 'gpr'),
            os.path.join(install_dir, 'include'),
            build_dir)
Example No. 27
def test_unlink():
    work_dir = os.getcwd()

    test_dir_path = os.path.join(work_dir, 'dir')
    deleted_file_path = os.path.join(test_dir_path, 'deleted2.txt')
    mkdir(test_dir_path)

    ntfile = NTFile(test_dir_path)
    ntfile3 = NTFile(test_dir_path)
    ntfile2 = NTFile(deleted_file_path)

    try:
        # delete a nonexistent file
        ntfile2.unlink()

        # delete file with readonly attribute
        touch(deleted_file_path)
        ntfile2.read_attributes()
        ntfile2.basic_info.file_attributes.attr |= FileAttribute.READONLY

        assert 'READONLY' in str(ntfile2.basic_info.file_attributes)
        ntfile2.write_attributes()
        ntfile2.unlink()

        # delete file already pending deletion
        touch(deleted_file_path)
        ntfile2.open(Access.DELETE, Share.DELETE)
        ntfile2.dispose()
        ntfile2.unlink()

        # delete containing directory
        ntfile.unlink()

        ntfile.close()
        ntfile2.close()

        mkdir(test_dir_path)
        ntfile.open(Access.LIST_DIRECTORY, Share.ALL)
        ntfile3.unlink()

    finally:
        ntfile.close()
        ntfile2.close()
        ntfile3.close()

    ntfile = NTFile('nul')
    with pytest.raises(NTException) as err:
        ntfile.unlink()
    ntfile.close()
    assert 'NTFile.read_attributes:' in str(err)

    # A directory that is not empty cannot be deleted
    dir_to_delete = os.path.join(test_dir_path, 'dir_to_delete')
    mkdir(dir_to_delete)
    touch(os.path.join(dir_to_delete, 'afile.txt'))
    ntfile = NTFile(dir_to_delete)
    try:
        with pytest.raises(NTException) as err:
            ntfile.unlink()
    finally:
        ntfile.close()

    # A directory that is already opened and not empty cannot be
    # moved to trash
    dir_to_delete = os.path.join(test_dir_path, 'dir_to_delete')
    mkdir(dir_to_delete)
    touch(os.path.join(dir_to_delete, 'afile.txt'))

    ntfile = NTFile(dir_to_delete)
    ntfile2 = NTFile(dir_to_delete)
    try:
        ntfile.open(Access.LIST_DIRECTORY, Share.ALL)
        with pytest.raises(NTException) as err:
            ntfile2.unlink()
    finally:
        ntfile.close()
        ntfile2.close()

    # Try to delete a file that we cannot open
    ntfile = NTFile(deleted_file_path)
    ntfile2 = NTFile(deleted_file_path)
    try:
        touch(deleted_file_path)
        ntfile.open(Access.READ_DATA, Share.NOTHING)
        with pytest.raises(NTException) as err:
            ntfile2.unlink()
    finally:
        ntfile.close()
        ntfile2.close()
Example No. 28
    def prepare(self, prev, slot):
        mkdir(self.test_env['working_dir'])
        sync_tree(self.test_env['test_dir'], self.test_env['working_dir'])
        base = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
        Env().add_search_path('PYTHONPATH', base)
Example No. 29
def test_svn_repo():
    cwd = os.getcwd()

    # --- create local project
    project_path = os.path.join(cwd, "test_project")
    mkdir(project_path)
    mkdir(os.path.join(project_path, "trunk"))
    hello_relative_path = os.path.join("trunk", "hello.txt")
    hello_path = os.path.join(project_path, hello_relative_path)
    echo_to_file(hello_path, "hello")

    # --- create a SVN repository from that project
    repos_path = os.path.join(cwd, "repos")
    project_url = SVNRepository.create(repo_path=repos_path)
    project_url = project_url + "/Test_Project"
    p = Run(
        ["svn", "import", project_path, project_url, "-m", "initial import"])
    assert p.status == 0, p.out

    # --- checkout project into working dir A
    working_copy_a_path = os.path.join(cwd, "working_copy_a")
    svn_a = SVNRepository(working_copy_a_path)
    with pytest.raises(SVNError):
        svn_a.update()
    with pytest.raises(SVNError):
        svn_a.update(url=file_url("bad_url"))
    local_change = svn_a.update(project_url)
    assert local_change
    local_change = svn_a.update()
    assert not local_change
    # verify the content of the working dir A and its revision
    assert svn_a.url == project_url
    assert os.path.exists(
        os.path.join(working_copy_a_path,
                     hello_relative_path)), "checkout failed"
    assert svn_a.current_revision == "1"
    # modify the working dir, commit the change,
    # update the working dir and verify the new current revision
    echo_to_file(os.path.join(working_copy_a_path, hello_relative_path), "bye")
    svn_a.svn_cmd(["commit", "-m", "modify hello"])
    svn_a.update()
    assert svn_a.current_revision == "2"
    svn_a.update(revision="1")
    assert svn_a.current_revision == "1"
    with pytest.raises(SVNError):
        svn_a.update(revision="404")

    # make local changes in the working dir B before updating it
    working_copy_b_path = os.path.join(cwd, "working_copy_b")
    svn_b = SVNRepository(working_copy_b_path)
    svn_b.update(project_url)
    foo_path = os.path.join(working_copy_b_path, "trunk", "foo")
    touch(foo_path)
    hello_b_path = os.path.join(working_copy_b_path, hello_relative_path)
    echo_to_file(hello_b_path, "kitty")
    local_change = svn_b.update()
    assert local_change
    assert os.path.exists(foo_path)
    with open(hello_b_path, "r") as f:
        assert "kitty" in f.read()
    # update and cancel all changes
    svn_b.update(force_and_clean=True)
    assert not os.path.exists(foo_path)
    with open(hello_b_path, "r") as f:
        assert "bye" in f.read()

    # checkout into an existing path (not versioned)
    working_copy_c_path = os.path.join(cwd, "working_copy_c")
    svn_c = SVNRepository(working_copy_c_path)
    touch(working_copy_c_path)
    with pytest.raises(SVNError) as err:
        svn_c.update(url=project_url)
    assert "not empty" in str(err)
    rm(working_copy_c_path)
    mkdir(working_copy_c_path)

    bar_path = os.path.join(working_copy_c_path, "bar")
    touch(bar_path)
    # verify failures without the force option
    with pytest.raises(SVNError) as err:
        svn_c.update(url=project_url)
    assert "not empty" in str(err)
    touch(os.path.join(working_copy_c_path, ".svn"))
    with pytest.raises(SVNError):
        svn_c.update(url=project_url)
    svn_c.update(url=project_url, force_and_clean=True)
    # verify that the working dir C is clean
    assert not os.path.exists(bar_path)

    # modify a working dir and update it with a new project
    svn_d = SVNRepository(working_copy_c_path)
    touch(bar_path)
    svn_d.update()  # update with the last URL used for this dir
    svn_d.update(url=project_url)  # update with the same URL
    assert os.path.exists(bar_path)
    project2_url = project_url + "2"
    p = Run(
        ["svn", "import", project_path, project2_url, "-m", "initial import"])
    assert p.status == 0, p.out
    with pytest.raises(SVNError) as err:
        svn_d.update(url=project2_url)  # update with new URL
    assert "not empty" in str(err)
    svn_d.update(url=project2_url, force_and_clean=True)
    assert svn_d.url == project2_url
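
Boiled down from the test above, the basic checkout/update round trip with this API is brief. A sketch under the assumption that `SVNRepository` is imported from e3-core's `e3.vcs.svn`; directory names are placeholders:

# Minimal checkout/update round trip (placeholder paths; import path
# assumed from e3-core).
from e3.vcs.svn import SVNRepository

url = SVNRepository.create(repo_path='repos')  # brand new local repository
wc = SVNRepository('working_copy')
wc.update(url=url)          # the first update performs the checkout
print(wc.current_revision)  # e.g. '0' for a freshly created repository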
Example no. 30
    def create_dirs(self):
        """Create all required sandbox directories."""
        for d in self.dirs:
            mkdir(getattr(self, ('%s_dir' % d).replace(os.path.sep, '_')))
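
The `.replace(os.path.sep, '_')` handles entries of `self.dirs` that are relative paths rather than plain names. Below is a hedged illustration; every name in it is invented for the example, and only `mkdir` comes from `e3.fs`:

# Invented sandbox-like class illustrating the attribute lookup above.
import os
from e3.fs import mkdir

class Box:
    dirs = ('bin', os.path.join('log', 'tmp'))
    bin_dir = os.path.join('sbx', 'bin')
    log_tmp_dir = os.path.join('sbx', 'log', 'tmp')

    def create_dirs(self):
        for d in self.dirs:
            mkdir(getattr(self, ('%s_dir' % d).replace(os.path.sep, '_')))

Box().create_dirs()  # creates sbx/bin and sbx/log/tmp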
Example no. 31
from e3.fs import mkdir
from e3.os.fs import touch
import os

mkdir("test1")
for f in range(1000):
    touch(os.path.join("test1", f"file-{f}"))
for f in range(1000):
    mkdir(os.path.join("test1", f"dir-{f}"))
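
`mkdir` from `e3.fs` is expected to behave like `mkdir -p`, so re-running the snippet is harmless. As a hedged cleanup sketch, the matching `e3.fs.rm` call (the `recursive` flag is part of e3-core's `rm`) removes the whole tree:

# Cleanup sketch: remove everything created above.
from e3.fs import rm

rm("test1", recursive=True)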
Example no. 32
    def package_deps(self, package_dir):
        """
        Copy all libraries that are not part of GNAT Pro to the package
        directory.

        Once this is done, this package + GNAT Pro can be used in order to
        build Ada projects that depend on Langkit-generated libraries.

        :param str package_dir: Name of the directory where the package should
            be created.
        """
        # Destination directory for copies of static libs. Make sure it exists.
        if self.with_static:
            static_libdir = os.path.join(package_dir, self.static_libdir_name)
            mkdir(static_libdir)

        # Likewise for the destination directory for copies of dynamic libs
        if self.with_relocatable:
            dyn_libdir = os.path.join(package_dir, self.dyn_libdir_name)
            mkdir(dyn_libdir)

        def copy_in(filename, dirname):
            """Copy the "filename" to the "dirname" directory."""
            cp(filename, os.path.join(dirname, os.path.basename(filename)))

        # Ship gnatcoll-iconv and gnatcoll-gmp. Copy all files that gprinstall
        # created: shared libs, static libs, manifests, sources, etc.
        for prefix, name in [
            (self.gnatcoll_gmp_prefix, 'gmp'),
            (self.gnatcoll_iconv_prefix, 'iconv'),
        ]:
            # In all of the following directories, look for files/directories
            # that match "*gnatcoll_$name*" and copy them into $package_dir,
            # preserving the directory hierarchy.
            for d in ('bin', 'include', 'lib', os.path.join('share', 'gpr'),
                      os.path.join('share', 'gpr', 'manifests')):
                to_copy = glob.glob(
                    os.path.join(prefix, d, '*gnatcoll_{}*'.format(name)))
                for item in to_copy:
                    rel_item = os.path.relpath(item, prefix)
                    sync_tree(item,
                              os.path.join(package_dir, rel_item),
                              delete=False)

        # TODO??? For some reason, gnatcoll_gmp's project file tells the linker
        # to always put "-lgmp" although it's not needed when linking with
        # libgnatcoll_gmp.so (as it contains libgmp.a already). As a result,
        # linking programs with Libadalang can fail because of missing gmp
        # although it's already available. Investigation happens under
        # R613-014. To workaround this, just provide the static library.
        #
        # Likewise for gnatcoll_iconv/libiconv.a.
        #
        # On Linux 64-bit, copy these static libraries to lib64 so that they
        # take precedence over shared libs that would also be in lib64
        # directories.
        #
        # So ship gmp and libiconv.
        if self.with_static:
            lib_files = [os.path.join(self.gmp_prefix, 'lib', 'libgmp.a')]
            if self.libiconv_prefix:
                lib_files.append(
                    os.path.join(self.libiconv_prefix, 'lib', 'libiconv.a'))
            for f in lib_files:
                copy_in(f, static_libdir)

        # Ship libiconv's shared lib, as needed by the shared
        # libgnatcoll_iconv.
        if self.with_relocatable and self.libiconv_prefix:
            for item in glob.glob(
                    os.path.join(self.libiconv_prefix, self.dyn_libdir_name,
                                 'libiconv*' + self.dllext + '*')):
                copy_in(item, dyn_libdir)
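
The copy loop in `package_deps` relies on a compact idiom: glob under an installation prefix, then `sync_tree` each match to the same relative location under the destination. A standalone sketch with hypothetical `prefix` and `dest` paths:

# Sketch of the glob + sync_tree copy idiom used above; prefix and dest
# are hypothetical paths.
import glob
import os
from e3.fs import sync_tree

prefix = '/install/prefix'  # hypothetical source prefix
dest = '/package/dir'       # hypothetical package directory
for item in glob.glob(os.path.join(prefix, 'lib', '*gnatcoll_gmp*')):
    rel_item = os.path.relpath(item, prefix)
    sync_tree(item, os.path.join(dest, rel_item), delete=False)

`delete=False` matters here: it merges each copy into the destination instead of pruning files that are missing from the source.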
Example no. 33
    def create_dirs(self) -> None:
        """Create all required sandbox directories."""
        for d in self.dirs:
            mkdir(getattr(self, f"{d}_dir".replace(os.path.sep, "_")))
Example no. 34
    def prepare(self, previous_values):
        mkdir(self.test_env['working_dir'])
        sync_tree(self.test_env['test_dir'], self.test_env['working_dir'])
Example no. 35
             'statement not executed')
check_report('unexpected decision coverage failure indication',
             'def.rep',
             'decision outcome .* never exercised',
             check_present=False)

# Override --level up to DC. Check that we now find the DC violations we
# expect.

run(['-o', 'lev.rep'], covlevel='stmt+decision')
check_report('missing expected decision coverage failure indication',
             'lev.rep', 'decision outcome .* never exercised')

# Override --annotate only. Expect full coverage on the "if"
# statement.

mkdir('sc')
run(['--annotate=xcov', '--output-dir=sc'])
check_report('missing expected full coverage indication', 'sc/values.adb.xcov',
             r'\+:.*if')

# Override --annotate and --level. Expect partial coverage on the "if"
# statement.

mkdir('dc')
run(['--annotate=xcov', '--output-dir=dc'], covlevel='stmt+decision')
check_report('missing expected partial decision coverage indication',
             'dc/values.adb.xcov', '!:.*if')

thistest.result()
Example no. 37
def gprbuild(driver,
             project_file=None,
             cwd=None,
             gcov=False,
             scenario=None,
             gpr_project_path=None,
             **kwargs):
    """Launch gprbuild.

    :param project_file: project file to compile. If None, look first for
        a test.gpr in the test dir, and otherwise fall back on the common
        test.gpr project in the support subdir of the testsuite.
    :type project_file: str
    :param cwd: directory in which to run gprbuild. If None the gprbuild build
        is run in the default working dir for the test.
    :type cwd: str | None
    :param gcov: if True link with gcov libraries
    :type gcov: bool
    :param scenario: scenario variable values
    :type scenario: dict
    :param gpr_project_path: if not None, prepend this value to
        GPR_PROJECT_PATH
    :type gpr_project_path: None | str
    :param kwargs: additional keyword arguments, passed on to the
        e3.testsuite.process.check_call function
    :return: True on successful completion
    :rtype: bool
    """
    if scenario is None:
        scenario = {}

    if cwd is None:
        cwd = driver.test_env['working_dir']
    mkdir(cwd)

    if project_file is None:
        project_file = os.path.join(driver.test_env['test_dir'],
                                    'test.gpr')
        if not os.path.isfile(project_file):
            project_file = os.path.join(cwd, 'test.gpr')
            with open(os.path.join(TESTSUITE_ROOT_DIR, 'support',
                                   'test.gpr'), 'r') as fd:
                content = fd.read()
            with open(project_file, 'w') as fd:
                for component in driver.test_env.get('components', []):
                    fd.write('with "%s";\n' % PROJECT[component])
                fd.write(content)
            scenario['TEST_SOURCES'] = driver.test_env['test_dir']
    scenario['SUPPORT_SOURCES'] = os.path.join(TESTSUITE_ROOT_DIR, 'support')

    gprbuild_cmd = [
        'gprbuild', '--relocate-build-tree', '-p', '-P', project_file]
    for k, v in scenario.items():
        gprbuild_cmd.append('-X%s=%s' % (k, v))
    if gcov:
        gprbuild_cmd += ['-largs', '-lgcov', '-cargs',
                         '-fprofile-arcs', '-ftest-coverage', '-g']

    # Adjust process environment
    env = None
    if gpr_project_path:
        new_gpr_path = gpr_project_path
        if 'GPR_PROJECT_PATH' in os.environ:
            new_gpr_path += os.path.pathsep + os.environ['GPR_PROJECT_PATH']
        env = {'GPR_PROJECT_PATH': new_gpr_path}

    check_call(
        driver,
        gprbuild_cmd,
        cwd=cwd,
        env=env,
        ignore_environ=False,
        **kwargs)
    # If we get here it means the build succeeded.
    return True
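
A hedged example of a call site, as it might appear in a driver's run step; the scenario variable and extra project path are invented for illustration, and `driver` is the test driver instance the helper expects:

# Hypothetical call site: build with gcov instrumentation and an extra
# project search path (names invented for the example).
gprbuild(driver,
         gcov=True,
         scenario={'TEST_MODE': 'full'},
         gpr_project_path='/extra/projects')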
Example no. 38
def test_svn_repo():
    cwd = os.getcwd()

    # --- create local project
    project_path = os.path.join(cwd, 'test_project')
    mkdir(project_path)
    mkdir(os.path.join(project_path, 'trunk'))
    hello_relative_path = os.path.join('trunk', 'hello.txt')
    hello_path = os.path.join(project_path, hello_relative_path)
    echo_to_file(hello_path, 'hello')

    # --- create a SVN repository from that project
    repos_path = os.path.join(cwd, 'repos')
    project_url = file_url(repos_path + '/Test_project')
    p = Run(['svnadmin', 'create', repos_path])
    assert p.status == 0, p.out
    p = Run(
        ['svn', 'import', project_path, project_url, '-m', 'initial import'])
    assert p.status == 0, p.out

    # --- checkout project into working dir A
    working_copy_a_path = os.path.join(cwd, 'working_copy_a')
    svn_a = SVNRepository(working_copy_a_path)
    with pytest.raises(SVNError):
        svn_a.update()
    with pytest.raises(SVNError):
        svn_a.update(url=file_url('bad_url'))
    local_change = svn_a.update(project_url)
    assert local_change
    local_change = svn_a.update()
    assert not local_change
    # verify the content of the working dir A and its revision
    assert svn_a.url == project_url
    assert os.path.exists(
        os.path.join(working_copy_a_path,
                     hello_relative_path)), 'checkout failed'
    assert svn_a.current_revision == '1'
    # modify the working dir, commit the change,
    # update the working dir and verify the new current revision
    echo_to_file(os.path.join(working_copy_a_path, hello_relative_path), 'bye')
    svn_a.svn_cmd(['commit', '-m', 'modify hello'])
    svn_a.update()
    assert svn_a.current_revision == '2'
    svn_a.update(revision='1')
    assert svn_a.current_revision == '1'
    with pytest.raises(SVNError):
        svn_a.update(revision='404')

    # make local changes in the working dir B before updating it
    working_copy_b_path = os.path.join(cwd, 'working_copy_b')
    svn_b = SVNRepository(working_copy_b_path)
    svn_b.update(project_url)
    foo_path = os.path.join(working_copy_b_path, 'trunk', 'foo')
    touch(foo_path)
    hello_b_path = os.path.join(working_copy_b_path, hello_relative_path)
    echo_to_file(hello_b_path, 'kitty')
    local_change = svn_b.update()
    assert local_change
    assert os.path.exists(foo_path)
    with open(hello_b_path, 'r') as f:
        assert 'kitty' in f.read()
    # update and cancel all changes
    svn_b.update(force_and_clean=True)
    assert not os.path.exists(foo_path)
    with open(hello_b_path, 'r') as f:
        assert 'bye' in f.read()

    # checkout into an existing path (not versioned)
    working_copy_c_path = os.path.join(cwd, 'working_copy_c')
    svn_c = SVNRepository(working_copy_c_path)
    touch(working_copy_c_path)
    with pytest.raises(SVNError) as err:
        svn_c.update(url=project_url)
    assert 'not empty' in str(err)
    rm(working_copy_c_path)
    mkdir(working_copy_c_path)

    bar_path = os.path.join(working_copy_c_path, 'bar')
    touch(bar_path)
    # verify failures without the force option
    with pytest.raises(SVNError) as err:
        svn_c.update(url=project_url)
    assert 'not empty' in str(err)
    touch(os.path.join(working_copy_c_path, '.svn'))
    with pytest.raises(SVNError):
        svn_c.update(url=project_url)
    svn_c.update(url=project_url, force_and_clean=True)
    # verify that the working dir C is clean
    assert not os.path.exists(bar_path)

    # modify a working dir and update it with a new project
    svn_d = SVNRepository(working_copy_c_path)
    touch(bar_path)
    svn_d.update()  # update with the last URL used for this dir
    svn_d.update(url=project_url)  # update with the same URL
    assert os.path.exists(bar_path)
    project2_url = project_url + '2'
    p = Run(
        ['svn', 'import', project_path, project2_url, '-m', 'initial import'])
    assert p.status == 0, p.out
    with pytest.raises(SVNError) as err:
        svn_d.update(url=project2_url)  # update with new URL
    assert 'not empty' in str(err)
    svn_d.update(url=project2_url, force_and_clean=True)
    assert svn_d.url == project2_url