Beispiel #1
0
    def __init__(self, spec_dir, spec_config=None):
        """Initialize an AnodSpecRepository.

        :param spec_dir: directory containing the anod specs.
        :type spec_dir: str
        :param spec_config: dictionary containing the configuration for this
            AnodSpecRepository
        :type spec_config: dict

        :raise SandBoxError: if spec_dir is not an existing directory
        """
        logger.debug('initialize spec repository (%s)', spec_dir)

        if not os.path.isdir(spec_dir):
            raise SandBoxError('spec directory %s does not exist' % spec_dir)
        self.spec_dir = spec_dir
        self.api_version = None
        self.specs = {}
        self.repos = {}

        # Look for all spec files and data files. Map each spec basename
        # (without the .anod extension, stripped by [:-5]) to its path and
        # an initially empty list of associated data files.
        spec_list = {
            os.path.basename(k)[:-5]: {
                'path': k,
                'data': []
            }
            for k in ls(os.path.join(self.spec_dir, '*.anod'),
                        emit_log_record=False)
        }
        logger.debug('found %s specs', len(spec_list))

        data_list = [
            os.path.basename(k)[:-5]
            for k in ls(os.path.join(self.spec_dir, '*.yaml'),
                        emit_log_record=False)
        ]
        logger.debug('found %s yaml files', len(data_list))

        # Match yaml files with associated specifications: a data file
        # belongs to the spec whose name is its longest matching prefix.
        for data in data_list:
            candidate_specs = [
                spec_file for spec_file in spec_list
                if data.startswith(spec_file)
            ]
            # We pick the longest spec name
            candidate_specs.sort(key=len)
            if candidate_specs:
                spec_list[candidate_specs[-1]]['data'].append(data)

        # Create AnodModule objects. items() replaces iteritems(), which is
        # Python 2 only and raises AttributeError on Python 3.
        for name, value in spec_list.items():
            self.specs[name] = AnodModule(name, **value)

        # Declare spec prolog: execute prolog.py (if present) with
        # spec_config and the repository itself in its global namespace.
        prolog_file = os.path.join(spec_dir, 'prolog.py')
        self.prolog_dict = {
            'spec_config': spec_config,
            '__spec_repository': self
        }
        if os.path.exists(prolog_file):
            with open(prolog_file) as f:
                exec(compile(f.read(), prolog_file, 'exec'), self.prolog_dict)
Beispiel #2
0
    def __init__(self, spec_dir=None, spec_prolog=''):
        """Initialize an AnodSpecRepository.

        :param spec_dir: directory containing the anod specs. If None then
            parameters from previous instance will be used.
        :type spec_dir: str | None
        :param spec_prolog: python source code that should be prepended to
            each spec code
        :type spec_prolog: str

        :raise SandBoxError: if spec_dir is given but is not an existing
            directory
        """
        if spec_dir is None:
            # Singleton-style reuse: rely on the class-level spec_dir set
            # by a previous instantiation.
            assert self.spec_dir is not None, "repository not initialized"
            return

        logger.debug('initialize spec repository (%s)', spec_dir)

        # ??? The use of singleton should be reviewed in order to support
        # several spec repositories.
        if not os.path.isdir(spec_dir):
            raise SandBoxError(
                'spec directory %s does not exist' % spec_dir)
        self.__class__.spec_dir = spec_dir

        # Look for all spec files and data files. Map each spec basename
        # (without the .anod extension, stripped by [:-5]) to its path and
        # an initially empty list of associated data files.
        spec_list = {os.path.basename(k)[:-5]: {'path': k, 'data': []}
                     for k in ls(os.path.join(self.spec_dir, '*.anod'),
                                 emit_log_record=False)}
        logger.debug('found %s specs', len(spec_list))

        data_list = [os.path.basename(k)[:-5] for k in
                     ls(os.path.join(self.spec_dir, '*.yaml'),
                        emit_log_record=False)]
        logger.debug('found %s yaml files', len(data_list))

        # Match yaml files with associated specifications: a data file
        # belongs to the spec whose name is its longest matching prefix.
        for data in data_list:
            candidate_specs = [spec for spec in spec_list
                               if data.startswith(spec)]
            # We pick the longest spec name
            candidate_specs.sort(key=len)
            if not candidate_specs:
                logger.warning('no spec for data file %s.yaml', data)
            else:
                spec_list[candidate_specs[-1]]['data'].append(data)

        # Create AnodModule objects. items() replaces iteritems(), which is
        # Python 2 only and raises AttributeError on Python 3.
        for name, value in spec_list.items():
            self.specs[name] = AnodModule(name, **value)

        # Declare spec prolog
        self.spec_prolog = spec_prolog
Beispiel #3
0
    def make_minimal_copy_of_python_install(self,
                                            target_dir,
                                            exclude_modules=None):
        """Create a minimal copy of Python in target_dir.

        The goal of this method is to provide a minimal install that
        testcases can then tweak to their testing needs (e.g. manually
        remove some modules).

        PARAMETERS
            target_dir: The directory where the python install should
                be made. If the directory does not exist, it is
                automatically created.
            exclude_modules: If not None, an iterable of module names
                that should be excluded from the install.
        """
        # Root of the install hosting the current interpreter
        # (<root>/bin/python -> <root>).
        src_root_dir = os.path.dirname(os.path.dirname(sys.executable))
        # Make sure src_root_dir is not a symbolic link. Otherwise,
        # sync_tree fails.
        src_root_dir = os.path.realpath(src_root_dir)

        # Only copy the bare minimum of the Python install corresponding
        # to the current interpreter...
        file_list = []
        # ... all bin/python* files...
        file_list.extend(
            os.path.relpath(p, src_root_dir)
            for p in ls(os.path.join(src_root_dir, "bin", "python*")))
        # ... all lib/*python* files and directories (e.g.
        # lib/libpythonX.Y.a or the lib/pythonX.Y/ directory).
        file_list.extend(
            os.path.relpath(p, src_root_dir)
            for p in ls(os.path.join(src_root_dir, "lib", "*python*")))

        if exclude_modules is None:
            ignore = None
        else:
            # Translate each excluded module name into the matching
            # site-packages entries, relative to the install root, as
            # expected by sync_tree's ignore parameter.
            ignore = []
            site_pkg_dir = os.path.join(src_root_dir, "lib", "python*",
                                        "site-packages")
            for module_name in exclude_modules:
                ignore.extend(
                    os.path.relpath(p, src_root_dir)
                    for p in ls(os.path.join(site_pkg_dir, f"{module_name}*")))

        sync_tree(
            source=src_root_dir,
            target=target_dir,
            ignore=ignore,
            file_list=file_list,
        )
Beispiel #4
0
    def __process_one_test(self, qde):
        """Check the coverage report produced for one qualification entry."""
        frame(text=("report check for xfile = %s\n" % qde.xfile +
                    "drivers = %s" % str(qde.drivers)),
              char='~').display()

        # Count the number of expected exemption regions
        xregions = sum(
            len(source_xrn[kind])
            for source_xrn in qde.xrnotes.values()
            for kind in xNoteKinds)

        # We're looking at the last report produced, with the last
        # applicable xcov-level
        level_key = os.path.basename(qde.wdir)[0:3]
        self.__setup_expectations(
            ntraces=len(qde.drivers),
            xcovlevel=xcovlevel_from[level_key],
            xregions=xregions)

        reports = ls(os.path.join(qde.wdir, "test.rep"))

        thistest.fail_if(
            len(reports) != 1, "expected 1 report, found %d" % len(reports))

        self.report = Tfile(reports[0], self.__process_line)
        for element in self.rpElements:
            element.check()
    def get_test_list(self, sublist):
        """Return the relative paths of test.yaml files, one per test.

        :param sublist: if non-empty, only look at these sibling
            directories of test_dir; otherwise scan every entry next to
            test_dir. Missing test.yaml files are created on the fly.
        """
        # The tests are one per subdir of "ada_lsp"
        if sublist:
            dirs = [
                os.path.abspath(os.path.join(self.test_dir, '..', s))
                for s in sublist
            ]
        else:
            dirs = ls(os.path.join(self.test_dir, '*'))
        results = []
        for d in dirs:
            if os.path.isdir(d):
                # Create the test.yamls if they don't exist!
                yaml = os.path.join(d, 'test.yaml')
                basename = os.path.basename(d)

                if not os.path.exists(yaml):
                    # Open in text mode: we write str below, which binary
                    # mode ('wb') rejects with a TypeError on Python 3.
                    with open(yaml, 'w') as f:
                        logging.info("creating {} for you :-)".format(yaml))
                        f.write("title: '{}'\n".format(basename))
                results.append(os.path.join(basename, 'test.yaml'))

        logging.info('Found %s tests %s', len(results), results)
        logging.debug("tests:\n  " + "\n  ".join(results))
        return results
Beispiel #6
0
    def run_postinstall(self):
        """Run cygwin postinstall scripts.

        Execute every script under etc/postinstall (sorted by name),
        logging their output to postinstall.log, and rename each processed
        script with a .done suffix.
        """
        # Text mode: we write str to this log below; 'wb' (binary mode)
        # would raise TypeError on Python 3.
        with open(self.cyglog('postinstall.log'), 'w') as fd:
            os.chdir(self.root_dir)

            # Compute the list of postinstall scripts
            pscripts = []
            for ext in ('.sh', '.bat', '.cmd', '.dash'):
                pscripts += ls('etc/postinstall/*' + ext)
            pscripts.sort()

            # Set some env variables needed by the postinstall scripts
            os.environ['SHELL'] = '/bin/bash'
            os.environ['CYGWINROOT'] = self.root_dir
            # NOTE(review): 'dump' looks like a typo for 'dumb' (the
            # conventional minimal TERM value) -- confirm before changing.
            os.environ['TERM'] = 'dump'
            for p in (('usr', 'bin'), ('bin', ), ('usr', 'sbin'), ('sbin', )):
                os.environ['PATH'] = os.path.join(self.root_dir, *p) + ';' + \
                    os.environ['PATH']

            # Run postinstall scripts. Start the counter at 1 so progress
            # reads 1/N..N/N instead of 0/N..N-1/N.
            for index, ps in enumerate(pscripts, 1):
                logging.info('Run postinstall (%s/%s) %s',
                             index, len(pscripts), os.path.basename(ps))
                fd.write('run %s\n' % ps)

                if ps.endswith('.dash'):
                    Run([os.path.join(self.root_dir, 'bin', 'dash.exe'), ps],
                        output=fd)
                elif ps.endswith('.sh'):
                    Run([os.path.join(self.root_dir, 'bin', 'bash.exe'),
                         '--norc', '--noprofile', ps], output=fd)
                # Mark the script as processed so it is not run again.
                mv(ps, ps + '.done')
Beispiel #7
0
def check_valid(options, args):
    """Validate and normalize command line options; exit on fatal issues.

    :param options: parsed options object. Mutated in place: kitp is
        computed here and parts is normalized to a list.
    :param args: positional arguments (unused, kept for interface
        compatibility)
    """

    # We are producing qualification material. Better know what we're
    # aiming at, always:

    exit_if(not options.dolevel,
            "Please specify an explicit dolevel (--dolevel).")

    # Generating docs can be pretty long. Better make sure the output format
    # was intentionally stated:

    exit_if(not options.docformat,
            ("Please specify the desired output format (--docformat)."))

    # Likewise for the git branch name:

    exit_if(not options.branchname,
            ("Please specify the git branch name (--branch)."))

    # Convey whether we are requested to produce a kit:

    options.kitp = options.rekit or not options.parts

    # Settle on the set of documents we are to produce:

    options.parts = (valid_parts
                     if not options.parts else options.parts.split(','))

    # Plain for loop: a list comprehension used only for its side effects
    # builds a throwaway list and obscures intent.
    for part in options.parts:
        exit_if(
            part not in valid_parts,
            "Requested part '%s' is invalid, none of %s"
            % (part, str(valid_parts)))

    # work dir

    exit_if(not options.workdir, "A work dir must be specified (--work-dir)")

    warn_if(
        options.kitp and os.path.exists(options.workdir)
        and ls("%s/*" % options.workdir),
        "producing kit within non empty workdir")

    # Producing a STR requires a testsuite dir

    exit_if('str' in options.parts and not options.testsuite_dir,
            "--testsuite-dir required when producing a STR")

    # GIT aspects:

    exit_if(
        options.gitpull and options.gitsource,
        "Specifying git source is incompatible with "
        "request to pull from current origin")

    # In case we produce TOR/LRM traceability matrixes ...

    exit_if(not options.languages,
            "Please specify the qualified languages (--languages)")
Beispiel #8
0
    def __expand_shared_controllers(self, drivers, cspecs):
        """Search and expand possible shared drivers and/or consolidation
        specs uptree for our local functional units."""

        # Shared drivers would be <updir>/test_<xx>*.(adb|c) for some possible
        # updir and every <xx> such that there is a src/<xx>*.(adb|c).
        #
        # Likewise for consolidation specs, as <updir>/cons_<xx>*.txt
        #
        # Gather *set* of <xx> candidates first, then expand the associated
        # possible lists of drivers (each maybe empty). Beware not to include
        # child or sub units, as these dont mirror as such in the set of test
        # drivers.
        unit_prefixes = set()
        for src in ls("src/*"):
            m = re.match(r"([a-z_]*).*\.(adb|c)", os.path.basename(src))
            if m:
                unit_prefixes.add(m.group(1))

        # If there is no candidate body in src/, arrange to run all the
        # drivers. This is useful for test groups on GPR variations for
        # example, where we typically want to run all the drivers and check
        # the analysis results against different sets of SCOS.
        if not unit_prefixes:
            unit_prefixes = [""]

        for body in unit_prefixes:
            for updepth in range(1, thistest.depth):
                prefix = "../" * updepth
                if drivers:
                    self.__expand_drivers(
                        "%(p)ssrc/test_%(b)s*.adb %(p)ssrc/test_%(b)s*.c" % {
                            'p': prefix,
                            'b': body
                        })
                if cspecs:
                    self.__expand_cspecs("%ssrc/cons_%s*.txt" % (prefix, body))
Beispiel #9
0
    def __init__(self, dotxcov_pattern):
        """Build the line-note table from a set of annotated sources.

        xcov --annotate=xcov produces a set of .xcov annotated unit
        sources, each featuring a synthetic note per line; parse each of
        them into self.elnotes.
        """
        self.elnotes = {}
        for annotated_source in ls(dotxcov_pattern):
            self.listing_to_enotes(annotated_source)
Beispiel #10
0
 def load_series_from_dir(cls, path: str) -> 'ModelDatabase':
     """Load all series as xml files from a given directory.

     :param path: the directory containing the list of xml models
     :type path: str
     :return: a model database with one series per xml file found
     :rtype: pylmcp.model.ModelDatabase
     """
     db = cls()
     for model_path in ls(os.path.join(path, '*.xml')):
         series = Series.load_model(model_path, model_db=db)
         db.add_series(series)
     return db
Beispiel #11
0
    def update_mount_points(self):
        """Regenerate cygwin's /etc/fstab mount table.

        Scan the top-level directories of c:\\ and write one ntfs mount
        entry per directory accepted by should_be_mounted.
        """
        logging.info('update mount points')
        # Raw string: 'c:\*' contains the invalid escape sequence '\*',
        # which newer Python versions warn about (and may reject).
        mount_list = ls(r'c:\*')

        mount_list = [k for k in mount_list if
                      os.path.isdir(k) and self.should_be_mounted(k.lower())]
        # Cygwin-style paths: forward slashes, spaces escaped as \040,
        # leading 'c:' dropped by the [2:] slice.
        mount_list = [k.replace('\\', '/').replace(' ', '\\040')[2:]
                      for k in mount_list]

        # Context manager guarantees the file is closed even on error.
        with open(os.path.join(self.root_dir, "etc", "fstab"), "w") as fd:
            fd.write("none /cygdrive cygdrive binary,posix=0,user 0 0\n")
            for k in mount_list:
                # Host-specific and special mounts get the noacl option.
                if k.startswith('/' + Env().host.machine) or \
                        k == '/gnatmail' or k == '/it':
                    fd.write(
                        "c:%s %s ntfs binary,posix=0,noacl 0 0\n" % (k, k))
                else:
                    fd.write("c:%s %s ntfs binary,posix=0 0 0\n" % (k, k))
    def build(self, prev, slot):
        """Build the test project in build_dir with gprbuild."""
        self.logger = logging.getLogger(f"test.{self.test_env['test_name']}")

        build_env = {
            "TEST_SOURCES": self.test_source_dir,
            "SUPPORT_SOURCES": self.support_source_dir,
        }

        mkdir(self.build_dir)
        # Copy helper python files into the build dir, if the test has any.
        support_py = ls(os.path.join(self.test_source_dir, "*.py"))
        if support_py:
            cp(support_py, self.build_dir)

        gprbuild_cmd = [
            "gprbuild", "-P", self.project_file, "--relocate-build-tree",
            "-p"
        ]
        check_call(
            self,
            gprbuild_cmd,
            cwd=self.build_dir,
            timeout=300,
            env=build_env,
            ignore_environ=False,
        )
Beispiel #13
0
    def __init__(self, spec_dir, spec_config=None):
        """Initialize an AnodSpecRepository.

        :param spec_dir: directory containing the anod specs.
        :type spec_dir: str
        :param spec_config: dictionary containing the configuration for this
            AnodSpecRepository
        :type spec_config: dict | SandboxConfig

        :raise SandBoxError: if spec_dir is not an existing directory
        """
        logger.debug('initialize spec repository (%s)', spec_dir)

        if not os.path.isdir(spec_dir):
            raise SandBoxError(
                'spec directory %s does not exist' % spec_dir)
        self.spec_dir = spec_dir
        self.api_version = __version__
        self.specs = {}
        self.repos = {}

        # Look for all spec files and data files. Map each spec basename
        # (without the .anod extension) to its path and an initially empty
        # list of associated data files.
        spec_list = {os.path.basename(os.path.splitext(k)[0]): {'path': k,
                                                                'data': []}
                     for k in ls(os.path.join(self.spec_dir, '*.anod'),
                                 emit_log_record=False)}
        logger.debug('found %s specs', len(spec_list))

        # API == 1.4: data files live next to the specs as <name>.yaml
        yaml_files = ls(os.path.join(self.spec_dir, '*.yaml'),
                        emit_log_record=False)
        data_list = [os.path.basename(k)[:-5] for k in yaml_files]
        logger.debug('found %s yaml files API 1.4 compatible', len(data_list))

        # Match yaml files with associated specifications
        for data in data_list:
            candidate_specs = [spec_file for spec_file in spec_list
                               if data.startswith(spec_file)]
            # We pick the longest spec name
            candidate_specs.sort(key=len)
            if candidate_specs:
                spec_list[candidate_specs[-1]]['data'].append(data)

        # Find yaml files that are API >= 1.5 compatible: data files live
        # in a subdirectory named after their spec.
        new_yaml_files = ls(os.path.join(self.spec_dir, '*', '*.yaml'),
                            emit_log_record=False)

        for yml_f in new_yaml_files:
            associated_spec = os.path.basename(os.path.dirname(yml_f))

            # Keep only the yaml files associated with an .anod file
            if associated_spec in spec_list:
                # We're recording the relative path without the extension
                suffix, _ = os.path.splitext(os.path.basename(yml_f))

                spec_list[associated_spec]['data'].append(
                    os.path.join(associated_spec, suffix))

        # Create AnodModule objects. items() replaces iteritems(), which is
        # Python 2 only and raises AttributeError on Python 3.
        for name, value in spec_list.items():
            self.specs[name] = AnodModule(name, **value)

        # Declare spec prolog: execute prolog.py (if present) with
        # spec_config and the repository itself in its global namespace.
        prolog_file = os.path.join(spec_dir, 'prolog.py')
        self.prolog_dict = {'spec_config': spec_config,
                            '__spec_repository': self}
        if os.path.exists(prolog_file):
            with open(prolog_file) as f:
                exec(compile(f.read(), prolog_file, 'exec'),
                     self.prolog_dict)
Beispiel #14
0
 def __expand_cspecs(self, patterns):
     """Accumulate consolidation specs to exercise.

     Extend self.all_cspecs with every file matching any of the glob
     patterns in PATTERNS.
     """
     for pattern in to_list(patterns):
         self.all_cspecs += ls(pattern)
Beispiel #15
0
 def __expand_drivers(self, patterns):
     """Accumulate test drivers to exercise.

     Extend self.all_drivers with every file matching any of the glob
     patterns in PATTERNS.
     """
     for pattern in to_list(patterns):
         self.all_drivers += ls(pattern)
Beispiel #16
0
    def __init__(self, spec_dir: str, spec_config: Optional[dict] = None):
        """Initialize an AnodSpecRepository.

        :param spec_dir: directory containing the anod specs.
        :param spec_config: dictionary containing the configuration for this
            AnodSpecRepository
        :raise SandBoxError: if spec_dir is not an existing directory
        """
        logger.debug("initialize spec repository (%s)", spec_dir)

        if not os.path.isdir(spec_dir):
            raise SandBoxError("spec directory %s does not exist" % spec_dir)
        self.spec_dir = spec_dir
        self.api_version = __version__
        self.specs = {}
        self.repos: Dict[str, Dict[str, str]] = {}

        # Look for all spec files and data files. Map each spec basename
        # (without the .anod extension) to its path and an initially empty
        # list of associated data files.
        spec_list = {
            os.path.basename(os.path.splitext(k)[0]): {
                "path": k,
                "data": []
            }
            for k in ls(os.path.join(self.spec_dir, "*.anod"),
                        emit_log_record=False)
        }
        logger.debug("found %s specs", len(spec_list))

        # API == 1.4: data files live next to the specs as <name>.yaml
        yaml_files = ls(os.path.join(self.spec_dir, "*.yaml"),
                        emit_log_record=False)
        data_list = [os.path.basename(k)[:-5] for k in yaml_files]
        logger.debug("found %s yaml files API 1.4 compatible", len(data_list))

        # Match yaml files with associated specifications
        for data in data_list:
            candidate_specs = [
                spec_file for spec_file in spec_list
                if data.startswith(spec_file)
            ]
            # We pick the longest spec name
            candidate_specs.sort(key=len)
            if candidate_specs:
                spec_list[candidate_specs[-1]]["data"].append(
                    data)  # type: ignore

        # Find yaml files that are API >= 1.5 compatible: data files live
        # in a subdirectory named after their spec.
        new_yaml_files = ls(os.path.join(self.spec_dir, "*", "*.yaml"),
                            emit_log_record=False)

        for yml_f in new_yaml_files:
            associated_spec = os.path.basename(os.path.dirname(yml_f))

            # Keep only the yaml files associated with an .anod file
            if associated_spec in spec_list:
                # We're recording the relative path without the extension
                suffix, _ = os.path.splitext(os.path.basename(yml_f))

                spec_list[associated_spec]["data"].append(  # type: ignore
                    os.path.join(associated_spec, suffix))

        # Create AnodModule objects
        for name, value in spec_list.items():
            self.specs[name] = AnodModule(name, **value)  # type: ignore

        # Load config/repositories.yaml
        repo_file = os.path.join(self.spec_dir, "config", "repositories.yaml")
        if os.path.isfile(repo_file):
            with open(repo_file) as fd:
                self.repos = yaml.safe_load(fd)

        # Declare spec prolog: execute prolog.py (if present) with
        # spec_config and the repository itself in its global namespace.
        prolog_file = os.path.join(spec_dir, "prolog.py")
        self.prolog_dict = {
            "spec_config": spec_config,
            "__spec_repository": self
        }
        if os.path.exists(prolog_file):
            with open(prolog_file) as f:
                exec(compile(f.read(), prolog_file, "exec"), self.prolog_dict)
Beispiel #17
0
    def __init__(
        self,
        spec_dir: str,
        spec_config: Any = None,
        # Ideally should be spec_config: Optional[SpecConfig] = None,
        # We keep it to Any to avoid mypy issues on other projects
        extra_repositories_config: Optional[dict] = None,
    ):
        """Initialize an AnodSpecRepository.

        :param spec_dir: directory containing the anod specs.
        :param spec_config: dictionary containing the configuration for this
            AnodSpecRepository
        :param extra_repositories_config: first read the configuration from
            <spec_dir>/config/repositories.yaml and update the result with
            extra_repositories_config
        :raise SandBoxError: if spec_dir is not an existing directory
        """
        logger.debug("initialize spec repository (%s)", spec_dir)

        if not os.path.isdir(spec_dir):
            raise SandBoxError(f"spec directory {spec_dir} does not exist")
        self.spec_dir = spec_dir
        self.api_version = __version__
        # Spec name -> AnodModule, filled below.
        self.specs = {}
        # Repository name -> repository settings, loaded from
        # config/repositories.yaml and optionally overridden by
        # extra_repositories_config.
        self.repos: Dict[str, Dict[str, str]] = {}

        # Look for all spec files and data files. Map each spec basename
        # (without the .anod extension) to its path and an initially empty
        # list of associated data files.
        spec_list = {
            os.path.basename(os.path.splitext(k)[0]): {
                "path": k,
                "data": []
            }
            for k in ls(os.path.join(self.spec_dir, "*.anod"),
                        emit_log_record=False)
        }
        logger.debug("found %s specs", len(spec_list))

        # API == 1.4: data files live next to the specs as <name>.yaml.
        yaml_files = ls(os.path.join(self.spec_dir, "*.yaml"),
                        emit_log_record=False)
        # [:-5] strips the ".yaml" extension.
        data_list = [os.path.basename(k)[:-5] for k in yaml_files]
        logger.debug("found %s yaml files API 1.4 compatible", len(data_list))

        # Match yaml files with associated specifications: a data file
        # belongs to the spec whose name is its longest matching prefix.
        for data in data_list:
            candidate_specs = [
                spec_file for spec_file in spec_list
                if data.startswith(spec_file)
            ]
            # We pick the longest spec name
            candidate_specs.sort(key=len)
            if candidate_specs:
                spec_list[candidate_specs[-1]]["data"].append(
                    data)  # type: ignore

        # Find yaml files that are API >= 1.5 compatible: data files live
        # in a subdirectory named after their spec.
        new_yaml_files = ls(os.path.join(self.spec_dir, "*", "*.yaml"),
                            emit_log_record=False)

        for yml_f in new_yaml_files:
            associated_spec = os.path.basename(os.path.dirname(yml_f))

            # Keep only the yaml files associated with an .anod file
            if associated_spec in spec_list:
                # We're recording the relative path without the extension
                suffix, _ = os.path.splitext(os.path.basename(yml_f))

                spec_list[associated_spec]["data"].append(  # type: ignore
                    os.path.join(associated_spec, suffix))

        # Create AnodModule objects
        for name, value in spec_list.items():
            self.specs[name] = AnodModule(name, **value)  # type: ignore

        # Load config/repositories.yaml
        repo_file = os.path.join(self.spec_dir, "config", "repositories.yaml")
        if os.path.isfile(repo_file):
            with open(repo_file) as fd:
                self.repos = yaml.safe_load(fd)

        # Apply caller-provided overrides on top of the file contents:
        # per-repository dicts are merged, unknown repositories are added.
        if extra_repositories_config:
            for repo_name, repo_data in extra_repositories_config.items():
                if repo_name in self.repos:
                    self.repos[repo_name].update(repo_data)
                else:
                    self.repos[repo_name] = repo_data

        # Make sure that all revision are strings and not floats (a bare
        # numeric revision in the yaml file would otherwise come back as a
        # number).
        for repo_conf in self.repos.values():
            if "revision" in repo_conf:
                repo_conf["revision"] = str(repo_conf["revision"])

        # Ensure the prolog always sees a SpecConfig carrying the resolved
        # spec_dir and repositories.
        if spec_config is None:
            spec_config = SpecConfig()
        spec_config.spec_dir = self.spec_dir
        spec_config.repositories = self.repos

        # Declare spec prolog: execute prolog.py (if present) with
        # spec_config and the repository itself in its global namespace.
        prolog_file = os.path.join(spec_dir, "prolog.py")
        self.prolog_dict = {
            "spec_config": spec_config,
            "__spec_repository": self
        }
        if os.path.exists(prolog_file):
            with open(prolog_file) as f:
                exec(compile(f.read(), prolog_file, "exec"), self.prolog_dict)