Example 1
 def copy_sid_files(filenames, dirnames):
     """
     Copy SID files (``filenames``, a glob pattern) to each of the given
     directories (``dirnames``, a list of directories).
     """
     for d in dirnames:
         cp(filenames, d)
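For reference, a minimal self-contained variant of the same pattern, assuming the e3.fs helpers used throughout these examples (cp accepts a glob pattern as its source); the file names and directories below are hypothetical:

from e3.fs import cp, mkdir

# Hypothetical sketch: copy every *.sid file matched by a glob pattern into
# each destination directory, creating the directories first.
dest_dirs = ["build/a", "build/b"]
for d in dest_dirs:
    mkdir(d)              # e3.fs.mkdir creates missing intermediate dirs
    cp("sids/*.sid", d)   # the source argument may be a glob pattern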
Example 2
 def install_shortcuts(self):
     """Create shortcuts and additional directories"""
     logging.info('Install shortcuts')
     mkdir('c:/cygwin/mingw')
     mkdir('c:/cygwin/mingw/include')
     cp(os.path.join(setup_dir, 'shortcuts', '*.lnk'),
        os.path.join(os.environ['USERPROFILE'], 'desktop'))
Example 3
 def sync(tr):
     target_dir = os.path.join(self.repodir, "testsuite",
                               os.path.dirname(tr))
     if os.path.exists(target_dir):
         cp(tr, target_dir)
     else:
         print("ERRRR !! inexistant target dir for %s" % tr)
Example 4
    def prepare(self, previous_values, slot):
        testsuite_dir = os.path.join(os.path.dirname(__file__), "..")
        mkdir(self.test_env["working_dir"])
        sync_tree(self.test_env["test_dir"], self.test_env["working_dir"])

        # Create .gnatstudio
        self.gps_home = os.path.join(self.test_env["working_dir"],
                                     ".gnatstudio")
        mkdir(self.gps_home)

        # Populate the .gnatstudio dir
        sync_tree(
            os.path.abspath(os.path.join(testsuite_dir, "gnatstudio_home")),
            self.gps_home,
            delete=False,
        )
        if self.env.options.pycov:
            # Copy the coverage preference
            cp(
                os.path.join(testsuite_dir, "pycov_data", ".coveragerc"),
                self.test_env["working_dir"],
            )
            py_name = ".coverage"
            py_dir = os.path.join(testsuite_dir, "pycov_data")
            mkdir(py_dir)
            self.test_env["pycov"] = os.path.abspath(
                os.path.join(py_dir, py_name))
        else:
            self.test_env["pycov"] = ""
Example 5
    def tear_up(self, prev):
        super(DefaultDriver, self).tear_up(prev)

        # REMARK: Why do I have to mkdir the working dir ??
        fs.mkdir(self.working_dir())
        with open(self.working_dir('gen.gpr'), 'w') as f:
            f.write('''
            with "ada_py_bind";

            library project Gen is
               for Source_Dirs use ("{}");
               for Library_Dir use "test";
               for Create_Missing_Dirs use "True";

               for Library_Name use "gen";
               for Library_Kind use "relocatable";
               for Library_Standalone use "standard";
               for Library_Auto_Init use "true";
               for Library_Interface use ("demo");
               for Object_Dir use "obj";

               package Compiler renames Ada_Py_Bind.Compiler;
               for Leading_Library_Options use Ada_Py_Bind.Py_Bind_Lib_Options;
            end Gen;
            '''.format(P.abspath(self.test_dir())))

        # Copy build_lib.py to working directory
        fs.cp(P.join(P.dirname(P.abspath(__file__)), "build_lib.py"),
              self.working_dir('build_lib.py'))
Example 6
    def run(self):
        # Build the test program
        if self.test_env.get('no-coverage'):
            gpr_project_path = self.env.gnatcoll_debug_gpr_dir
        else:
            gpr_project_path = self.env.gnatcoll_gpr_dir
        gprbuild(self, gcov=self.env.gcov, gpr_project_path=gpr_project_path)

        # Copy the requested data files
        for data in self.test_env.get('data', []):
            cp(os.path.join(self.test_env['test_dir'], data),
               self.test_env['working_dir'], recursive=True)

        pre_test_py = os.path.join(self.test_env['test_dir'], 'pre_test.py')
        if os.path.isfile(pre_test_py):
            check_call(self, [interpreter(), pre_test_py],
                       cwd=self.test_env['working_dir'],
                       timeout=self.default_process_timeout)

        # Run the test program
        test_exe = self.test_env.get('test_exe', 'obj/test')
        process = run_test_program(
            self,
            [os.path.join(self.test_env['working_dir'], test_exe)],
            timeout=self.default_process_timeout)
        self.output += process.out.decode('utf-8')
Example 7
 def send_event(self, event: Event) -> bool:
     d = event.as_dict()
     prefix = "{}-{}".format(d["name"], d["uid"])
     log_file = os.path.join(self.log_dir, prefix + ".log")
     attach_dir = os.path.join(self.log_dir, prefix)
     mkdir(attach_dir)
     with open(log_file, "w") as fd:
         json.dump(d, fd, indent=2, sort_keys=True)
     for name, attachment in list(event.get_attachments().items()):
         cp(attachment[0], os.path.join(attach_dir, name))
     return True
Example 8
 def _capture_for_developers(self):
     """Utility for GPS developers: if GPS_DEV is set, capture the
        logs in $GPS_DEV
     """
     printed = ""
     if GPS_DEV in os.environ:
         printed = "\n"
         tgt = os.environ[GPS_DEV]
         for g in glob.glob(os.path.join(self.gps_home, "log", "*")):
             cp(g, tgt)
             printed += "captured log: {}\n".format(
                 os.path.join(tgt, os.path.basename(g)))
     return printed
Example 9
 def copy_shared_lib(self, pattern, dest):
     """
     Copy the shared library (or libraries) matched by "pattern" to the
     "dest" directory.
     """
     self.assert_with_relocatable()
     # On Linux, the name of shared object files can (but does not need
     # to) be followed by a version number. If both flavors are present,
     # choose the ones with a version number first, as these will be the
     # ones the linker will choose.
     if self.env.build.os.name == 'linux' and glob.glob(pattern + '.*'):
         pattern += '.*'
     cp(pattern, dest)
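The version-suffix selection above can be shown in isolation; a hedged sketch assuming e3.fs.cp and a hypothetical lib/ layout:

import glob

from e3.fs import cp

# Hypothetical sketch: if both "libfoo.so" and "libfoo.so.1" are present,
# prefer the versioned files, since those are what the linker resolves.
pattern = "lib/libfoo.so"         # hypothetical shared-library pattern
if glob.glob(pattern + ".*"):     # any versioned flavor present?
    pattern += ".*"
cp(pattern, "install/lib")        # hypothetical destination directory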
Example 10
    def run(self):
        builder = self.data.builder
        cache_dir = self.sandbox.tmp_cache_dir

        skip = False
        if os.path.isfile(os.path.join(
                cache_dir, builder.filename)) and os.path.isfile(
                    os.path.join(cache_dir, builder.filename + ".sha1")):
            with open(os.path.join(cache_dir, builder.filename + ".sha1"),
                      "rb") as f:
                checksum = f.read(1024).decode()
            skip = checksum == hash.sha1(
                os.path.join(cache_dir, builder.filename))

        if skip:
            self.run_status = ReturnValue.skip
        else:
            if builder.url.startswith("https://") or builder.url.startswith(
                    "http://"):

                if os.path.isfile(os.path.join(cache_dir, builder.filename)):
                    rm(os.path.join(cache_dir, builder.filename))
                if os.path.isfile(
                        os.path.join(cache_dir, builder.filename + ".sha1")):
                    rm(os.path.join(cache_dir, builder.filename + ".sha1"))

                s = HTTPSession(base_urls=[builder.base_url])
                result = s.download_file(url=builder.filename,
                                         dest=cache_dir,
                                         filename=builder.name)
                if result is None:
                    rm(os.path.join(cache_dir, builder.filename))
                    self.run_status = ReturnValue.failure
                else:
                    self.run_status = ReturnValue.success
                    with open(
                            os.path.join(cache_dir,
                                         builder.filename + ".sha1"),
                            "w") as f:
                        f.write(
                            hash.sha1(os.path.join(cache_dir,
                                                   builder.filename)))
            else:
                cp(
                    os.path.join(self.sandbox.specs_dir, "patches",
                                 builder.url),
                    cache_dir,
                )
                self.run_status = ReturnValue.success
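The cache check at the top of this example can be reduced to a short sketch, assuming e3.hash.sha1(path) returns the hexadecimal digest of the file at path; all paths below are hypothetical:

import os

from e3 import hash

cache_dir = "cache"                      # hypothetical cache directory
filename = "sources.tar.gz"              # hypothetical artifact name
archive = os.path.join(cache_dir, filename)
checksum_file = archive + ".sha1"

# Skip the download when the cached archive still matches its stored digest.
skip = False
if os.path.isfile(archive) and os.path.isfile(checksum_file):
    with open(checksum_file) as f:
        stored = f.read().strip()
    skip = stored == hash.sha1(archive)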
Example 11
    def populate_package_dir(self, package_dir: str) -> None:
        super().populate_package_dir(package_dir=package_dir)

        handler_file = os.path.join(package_dir, "lambda_handler_module.py")
        with open(handler_file, "w") as fd:
            fd.write(
                STARTUP_CODE
                % {"app_module": self.app_module, "app_name": self.app_name}
            )
        wrapper_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "flask_apigateway2_http_wrapper.py",
        )
        cp(wrapper_file, package_dir)
        sync_tree(package_dir, "/tmp/nico/package")
Example 12
    def check_run(self, previous_values):
        """Check status fragment."""
        if not previous_values['build']:
            return

        for data in self.test_env.get('data', []):
            cp(os.path.join(self.test_env['test_dir'], data),
               self.test_env['working_dir'],
               recursive=True)

        process = check_call(self, [self.test_env['test_exe']])
        if '<=== TEST PASSED ===>' not in process.out:
            self.result.set_status(TestStatus.FAIL)
        else:
            self.result.set_status(TestStatus.PASS)
        self.push_result()
Example 13
    def run(self, prev):
        fs.mkdir(self.working_dir('test'))
        fs.cp(self.test_dir('test.py'), self.working_dir('test', 'test.py'))

        # Try to build, but don't log errors in the build, and recover from
        # TestError: We want to be able to test compilation errors too.
        try:
            self.run_and_check([sys.executable, 'build_lib.py'],
                               log_errors=False)
        except TestError:
            return

        environ['PYTHONPATH'] = P.pathsep.join(
            keep([environ.get('PYTHONPATH'), self.working_dir('test')])
        )

        self.run_and_check(['python', self.working_dir('test', 'test.py')],
                           append_output=True)
Example 14
    def check_run(self, previous_values):
        """Check status fragment."""
        if not previous_values['build']:
            return

        for data in self.test_env.get('data', []):
            cp(os.path.join(self.test_env['test_dir'], data),
               self.test_env['working_dir'], recursive=True)

        process = self.run_test_program(
            [os.path.join(self.test_env['working_dir'],
                          self.test_env['test_exe'])],
            timeout=self.process_timeout)
        if '<=== TEST PASSED ===>' not in process.out:
            self.result.set_status(TestStatus.FAIL)
        else:
            self.result.set_status(TestStatus.PASS)
        self.push_result()
Example 15
    def __latch_into(self, dir, part, toplevel, copy_from=None):

        this_target_is_tree = (self.this_docformat == 'html')

        # Compute the target dir or file name for our copy:

        this_target = (dir if toplevel and this_target_is_tree else
                       os.path.join(dir, self.kititem_for(part=part)))

        # Compute the source dir or file name for our copy:

        # The local or provided source build subdir name, assuming a
        # sphinx setup, with an html or pdf sub-subdir depending on the
        # doc format. For file outputs, assume the builders are setup to
        # produce PART.<docformat>, e.g. TOR.pdf:

        this_build_subdir = os.path.join(
            copy_from if copy_from is not None else "build",
            sphinx_target_for[self.this_docformat])

        this_source = (this_build_subdir if this_target_is_tree else
                       os.path.join(this_build_subdir,
                                    part.upper() +
                                    ".%s" % self.this_docformat))

        # Delete an old version of latched results that might
        # already be there if we're running with --work-dir.

        remove(this_target)

        # Now proceed with the latch operation per se:

        if not this_target_is_tree:
            cp(this_source, this_target, recursive=False)

        elif copy_from:
            cp(this_source, this_target, recursive=True)

        else:
            mv(this_build_subdir, this_target)

        print("%s %s available in %s %s" %
              (self.this_docformat, part.upper(), this_target,
               "(toplevel)" if toplevel else ""))
Example 16
    def set_up(self):
        self.project_file = self.test_env.get("project_file")
        if self.project_file is None:
            cp(
                os.path.join(ADA_SUPPORT_FILES, "test.gpr"),
                self.test_env["working_dir"],
            )
            self.project_file = "./test.gpr"
            self.main = "./obj/test"
        else:
            self.main = self.test_env.get("main")

        if not self.project_file or not isinstance(self.project_file, str):
            raise TestAbortWithError(
                'test.yaml: please define a "project_file" string field'
            )
        if not self.main or not isinstance(self.main, str):
            raise TestAbortWithError('test.yaml: please define a "main" string field')

        self.fake_ada_target = self.test_env.get("fake_ada_target")
        self.builder_and_runner = BuilderAndRunner(self)
Example 17
    def build(self, prev, slot):
        self.logger = logging.getLogger(f"test.{self.test_env['test_name']}")

        env = {
            "TEST_SOURCES": self.test_source_dir,
            "SUPPORT_SOURCES": self.support_source_dir,
        }

        mkdir(self.build_dir)
        py_files = ls(os.path.join(self.test_source_dir, "*.py"))
        if py_files:
            cp(py_files, self.build_dir)
        check_call(
            self,
            [
                "gprbuild", "-P", self.project_file, "--relocate-build-tree",
                "-p"
            ],
            cwd=self.build_dir,
            timeout=300,
            env=env,
            ignore_environ=False,
        )
Example 18
    def test_spec_loader_prolog_with_repos(self):
        sync_tree(self.spec_dir, "specs_dir")
        repositories_yaml = os.path.join("specs_dir", "config",
                                         "repositories.yaml")
        cp(repositories_yaml + ".tmpl", repositories_yaml)

        spec_repo = AnodSpecRepository("specs_dir")
        anod_class = spec_repo.load("prolog_test")
        assert anod_class.e3_version == "20.1"
        assert anod_class.has_foo is False
        assert anod_class.e3_extra_version is None

        override_conf = {
            "e3-core": {
                "revision": 21.0
            },
            "e3-extra": {
                "vcs": "git",
                "url": "unknown",
                "revision": "master"
            },
        }

        spec_config = SpecConfig()
        spec_config.foo = 2

        spec_repo2 = AnodSpecRepository(
            "specs_dir",
            spec_config=spec_config,
            extra_repositories_config=override_conf,
        )
        anod_class2 = spec_repo2.load("prolog_test")
        assert anod_class2.e3_version == "21.0"
        assert anod_class2.e3_extra_version == "master"

        assert anod_class2.has_foo is True
Example 19
 def deploy(self, dest):
     cp(self._path, dest)
Example 20
 def copy_in(filename, dirname):
     """Copy the "filename" to the "dirname" directory."""
     cp(filename, os.path.join(dirname, os.path.basename(filename)))
Example 21
from e3.env import Env
from e3.fs import cp
from e3.os.process import Run

if Env().host.platform.endswith('windows'):
    cp('config-windows.cgpr', 'config.cgpr')
    cp('prj_driver-windows.gpr', 'prj_driver.gpr')

Run(['gprbuild', '-p', '-q', 'prj.gpr'], output=None)

# test native without compiler packages
Run(['./main', '-P', 'prj.gpr'], output=None)

# test cross without compiler packages & no compiler installed
Run(['./main', '-P', 'prj_arm_eabi.gpr'], output=None)

# test using config's compiler package
Run(['./main', '--config', 'config.cgpr', '-P', 'prj.gpr'], output=None)

# test using project's compiler package
# under windows check also casing & executable extension support
Run(['./main', '-P', 'prj_driver.gpr'], output=None)

# test different compiler defined in project & config
Run(['./main', '--config', 'config.cgpr', '-P', 'prj_driver.gpr'], output=None)
Example 22
    def get_script_command_line(self) -> List[str]:
        """Return the command line to run the test script."""
        # Command line computation depends on the kind of script (Python or
        # shell).
        assert isinstance(self.env.discs, list)

        # Make sure the test script is present in the working directory
        assert self.test_control.opt_results is not None
        script_filename = self.test_control.opt_results["CMD"]
        self.script_file = self.working_dir("src", script_filename)
        if not os.path.isfile(self.script_file):
            raise TestAbortWithError(
                "cannot find script file {}".format(script_filename))

        _, ext = os.path.splitext(self.script_file)

        # Some tests have a _suffix in their extension. Using .startswith
        # ensures we don't treat ".cmd_x86" as ".sh".
        is_cmd = ext.startswith(".cmd")
        must_convert = is_cmd and (self.env.host.os.name != "windows"
                                   or "FORCE_SH" in self.env.discs)

        if ext == ".py":
            return [sys.executable, self.script_file]

        elif not is_cmd or must_convert:
            # If running a Bourne shell script, not running on Windows, or if
            # specifically asked to use a Bourne shell, create a shell script
            # to run instead of the given test script.
            new_script = []

            # First, make sure the current directory is in the PATH, to ease
            # running just-built programs in test scripts.
            new_script.append("PATH=.:$PATH; export PATH")

            # TODO: filesize_limit handling

            # If "self.env.support_dir" designates a directory that contains a
            # "support.sh" file, make the test script source it.
            support_dir = os.environ.get("TEST_SUPPORT_DIR", "")
            support_script = os.path.join(support_dir, "support.sh")
            if support_dir and os.path.isfile(support_script):
                new_script.append(". $TEST_SUPPORT_DIR/support.sh")

            # Read all lines in the original test script
            with open(self.script_file) as f:
                # Get rid of potential whitespaces and CR at the end of
                # each line.
                for line in f:
                    line = line.rstrip()
                    if must_convert:
                        # convert the "cmd" syntax to Bourne shell
                        for pattern, replacement in self.cmd_substitutions:
                            line = pattern.sub(replacement, line)
                    new_script.append(line)

            # Write the shell script and schedule its execution with "bash"
            new_script_filename = self.working_dir("__test.sh")
            with open(new_script_filename, "w") as f:
                for line in new_script:
                    f.write(line)
                    f.write("\n")
            return ["bash", new_script_filename]

        else:  # os-specific
            # We are running on Windows, so we can use "cmd" to run the
            # original script. Just make sure it has the correct extension.
            script_file = self.script_file
            if not script_file.endswith(".cmd"):
                script_file = self.working_dir("test__.cmd")
                cp(self.script_file, script_file)
            return ["cmd.exe", "/q", "/c", script_file]
Example 23
import os

from e3.env import Env
from e3.fs import cp
from testsuite_support.builder_and_runner import BuilderAndRunner, GPRINSTALL

if Env().host.platform.endswith('windows'):
    exeext = '.exe'
    cp('adactl', 'adactl' + exeext)
else:
    exeext = ""

p = BuilderAndRunner().run([
    GPRINSTALL, '-p', '--prefix=' + os.path.join(os.getcwd(), 'inst'),
    'inst.gpr'
])

if os.path.exists('inst/share/doc/gps/html/main.html'):
    print("OK main html")
else:
    print("NOK")

if os.path.exists('inst/bin/adactl' + exeext):
    print("OK adactl")
else:
    print("NOK")
Example 24
# Install lkql_checker.gpr only when needed (i.e. to run coverage analysis)
install_mode = "dev" if args.coverage else "usage"
run("gprinstall", f"--prefix={args.package_dir}", f"--mode={install_mode}",
    *common_args)

# Ship the rules
sync_tree(
    os.path.join(src_dir, "lkql_checker", "share"),
    os.path.join(args.package_dir, "share"),
    delete=False,
)

# Install the "lkql_repl.py" script
cp(
    os.path.join(src_dir, "lkql_repl.py"),
    os.path.join(src_dir, "bin"),
)

# Ship coverage data files for both liblkqllang and lkql_checker so that the
# testsuite can use them.
if args.coverage:
    instr_dir = os.path.join(args.package_dir, "instr")

    def copy_sid_files(filenames, dirnames):
        """
        Copy SID files (``filenames``, a glob pattern) to each of the given
        directories (``dirnames``, a list of directories).
        """
        for d in dirnames:
            cp(filenames, d)