Beispiel #1
0
    def sig_explicit_deps(self):
        """
        Hash the explicit dependencies of this task: :py:attr:`waflib.Task.Task.inputs`
        and :py:attr:`waflib.Task.Task.dep_nodes`, plus any manual dependencies
        registered in ``bld.deps_man``. Used by :py:meth:`waflib.Task.Task.signature`.

        When ``Logs.sig_delta`` is set, a human-readable breakdown of the
        per-dependency signatures is recorded so that signature changes between
        builds can be diffed.

        :rtype: hash value
        """
        bld = self.generator.bld
        bld_sigs = []
        # textual log of each contributing signature, only built when tracing deltas
        exp_output = ''

        # the inputs
        for x in self.inputs + self.dep_nodes:
            try:
                bld_sig = x.get_bld_sig()
                if Logs.sig_delta:
                    exp_output += '{} {} {}\n'.format(x.name, x.abspath(),
                                                      hexlify(bld_sig))
                bld_sigs.append(bld_sig)
            except (AttributeError, TypeError, IOError):
                # a missing signature is reported but does not abort the build
                Logs.warn('Missing signature for node %r (required by %r)' %
                          (x, self))
                continue  # skip adding the signature to the calculation, but continue adding other dependencies

        # manual dependencies, they can slow down the builds
        if bld.deps_man:
            additional_deps = bld.deps_man
            for x in self.inputs + self.outputs:
                try:
                    d = additional_deps[id(x)]
                except KeyError:
                    continue

                for v in d:
                    # NOTE(review): v.name is read before the isinstance check;
                    # plain functions have no .name attribute, so a callable
                    # dependency would raise here when sig_delta logging is on -- confirm
                    v_name = v.name
                    if isinstance(v, bld.root.__class__):
                        # dependency is a Node: use its build signature
                        try:
                            v = v.get_bld_sig()
                        except AttributeError:
                            raise Errors.WafError(
                                'Missing node signature for %r (required by %r)'
                                % (v, self))
                    elif hasattr(v, '__call__'):
                        v = v()  # dependency is a function, call it
                    if Logs.sig_delta:
                        exp_output += '{} {}\n'.format(v_name, hexlify(v))
                    bld_sigs.append(v)

        # NOTE(review): joining with a str assumes the collected signatures are
        # str; if get_bld_sig() returns bytes (Python 3) this would raise -- confirm
        dep_bld_sigs_str = "".join(bld_sigs)

        m = Utils.md5()
        m.update(dep_bld_sigs_str)
        explicit_sig = m.digest()

        if Logs.sig_delta:
            # persist the breakdown and log old/new views when the signature changed
            key = self.uid()
            prev_sig = bld.task_sigs.get((key, 'exp'), [])
            if prev_sig and prev_sig != explicit_sig:
                self.capture_signature_log('\nExplicit(Old):\n')
                self.capture_signature_log(bld.last_build['exp_deps'].get(
                    key, ''))
                self.capture_signature_log('\nExplicit(New):\n')
                self.capture_signature_log(exp_output)
            bld.last_build['exp_deps'][key] = exp_output
            bld.task_sigs[(key, 'exp')] = explicit_sig

        return explicit_sig
Beispiel #2
0
    def cmd_and_log(self, cmd, **kw):
        """
        Executes a process and returns stdout/stderr if the execution is successful.
        An exception is thrown when the exit status is non-0. In that case, both stderr
        and stdout will be bound to the WafError object (configuration tests)::

            def configure(conf):
                out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
                (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
                (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
                try:
                    conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
                except Errors.WafError as e:
                    print(e.stdout, e.stderr)

        :param cmd: args for subprocess.Popen
        :type cmd: list or string
        :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
        :type kw: dict
        :returns: a tuple containing the contents of stdout and stderr
        :rtype: string
        :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
        :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
        """
        subprocess = Utils.subprocess
        # a string command implies a shell invocation
        kw['shell'] = isinstance(cmd, str)
        self.log_command(cmd, kw)

        quiet = kw.pop('quiet', None)
        to_ret = kw.pop('output', STDOUT)

        if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError('Program %r not found!' % cmd[0])

        kw['stdout'] = kw['stderr'] = subprocess.PIPE
        if quiet is None:
            self.to_log(cmd)

        # cargs collects the arguments destined for communicate()/wait()
        cargs = {}
        if 'timeout' in kw:
            # communicate() only supports a timeout from Python 3.3 onwards
            if sys.hexversion >= 0x3030000:
                cargs['timeout'] = kw['timeout']
                if 'start_new_session' not in kw:
                    # allow killing the whole process group on timeout
                    kw['start_new_session'] = True
            del kw['timeout']
        if 'input' in kw:
            if kw['input']:
                cargs['input'] = kw['input']
                kw['stdin'] = subprocess.PIPE
            del kw['input']

        if 'cwd' in kw and not isinstance(kw['cwd'], str):
            # accept Node objects as the working directory
            kw['cwd'] = kw['cwd'].abspath()

        try:
            ret, out, err = Utils.run_process(cmd, kw, cargs)
        except Exception as exc:
            raise Errors.WafError('Execution failure: %s' % str(exc), ex=exc)

        def _as_text(buf):
            # normalize process output to str, tolerating undecodable bytes
            if isinstance(buf, str):
                return buf
            return buf.decode(sys.stdout.encoding or 'latin-1',
                              errors='replace')

        out = _as_text(out)
        err = _as_text(err)

        if out and quiet not in (STDOUT, BOTH):
            self.to_log('out: %s' % out)
        if err and quiet not in (STDERR, BOTH):
            self.to_log('err: %s' % err)

        if ret:
            wafe = Errors.WafError('Command %r returned %r' % (cmd, ret))
            wafe.returncode = ret
            wafe.stderr = err
            wafe.stdout = out
            raise wafe

        if to_ret == BOTH:
            return (out, err)
        if to_ret == STDERR:
            return err
        return out
Beispiel #3
0
    def post(self):
        """
        Creates tasks for this task generator. The following operations are performed:

        #. The body of this method is called only once and sets the attribute ``posted``
        #. The attribute ``features`` is used to add more methods in ``self.meths``
        #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
        #. The methods are then executed in order
        #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`

        :return: False if the task generator was already posted, True otherwise
        """
        # run at most once per task generator instance
        if getattr(self, 'posted', None):
            return False
        self.posted = True

        keys = set(self.meths)
        keys.update(feats['*'])

        # add the methods listed in the features
        self.features = Utils.to_list(self.features)
        for x in self.features:
            st = feats[x]
            if st:
                keys.update(st)
            elif not x in Task.classes:
                Logs.warn(
                    'feature %r does not exist - bind at least one method to it?',
                    x)

        # copy the precedence table (only entries relevant to the selected methods)
        prec = {}
        prec_tbl = self.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]

        # elements disconnected: methods with no incoming precedence constraint
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x:
                    break
            else:
                tmp.append(a)

        # reverse-sorted so that pop() releases methods in alphabetical order
        tmp.sort(reverse=True)

        # topological sort
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys:
                out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                for x in nlst:
                    # release x only when no remaining constraint still mentions it
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)
                        tmp.sort(reverse=True)

        # anything left in the precedence table indicates a dependency cycle
        if prec:
            buf = ['Cycle detected in the method execution:']
            for k, v in prec.items():
                buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
            raise Errors.WafError('\n'.join(buf))
        self.meths = out

        # then we run the methods in order
        Logs.debug('task_gen: posting %s %d', self, id(self))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)', x, id(self))
            v()

        Logs.debug('task_gen: posted %s', self.name)
        return True
    def cmd_and_log(self, cmd, **kw):
        """
        Executes a process and returns stdout/stderr if the execution is successful.
        An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
        will be bound to the WafError object (configuration tests)::

                def configure(conf):
                        out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
                        (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
                        (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
                        try:
                                conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
                        except Errors.WafError as e:
                                print(e.stdout, e.stderr)

        :param cmd: args for subprocess.Popen
        :type cmd: list or string
        :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
            The extra key ``decode_as`` selects the encoding used to decode the process output
            (defaults to ``default_encoding``).
        :type kw: dict
        :returns: a tuple containing the contents of stdout and stderr
        :rtype: string
        :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
        :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
        """
        subprocess = Utils.subprocess
        # a string command implies a shell invocation
        kw["shell"] = isinstance(cmd, str)
        self.log_command(cmd, kw)

        quiet = kw.pop("quiet", None)
        to_ret = kw.pop("output", STDOUT)

        if Logs.verbose and not kw["shell"] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError("Program %r not found!" % cmd[0])

        kw["stdout"] = kw["stderr"] = subprocess.PIPE
        if quiet is None:
            self.to_log(cmd)

        # cargs holds the arguments destined for communicate()/wait()
        cargs = {}
        if "timeout" in kw:
            # communicate() only supports a timeout from Python 3.3 onwards
            if sys.hexversion >= 0x3030000:
                cargs["timeout"] = kw["timeout"]
                if not "start_new_session" in kw:
                    kw["start_new_session"] = True
            del kw["timeout"]
        if "input" in kw:
            if kw["input"]:
                cargs["input"] = kw["input"]
                kw["stdin"] = subprocess.PIPE
            del kw["input"]

        if "cwd" in kw:
            # accept Node objects as the working directory
            if not isinstance(kw["cwd"], str):
                kw["cwd"] = kw["cwd"].abspath()

        encoding = kw.pop("decode_as", default_encoding)

        try:
            ret, out, err = Utils.run_process(cmd, kw, cargs)
        except Exception as e:
            raise Errors.WafError("Execution failure: %s" % str(e), ex=e)

        # normalize output to str, tolerating undecodable bytes
        if not isinstance(out, str):
            out = out.decode(encoding, errors="replace")
        if not isinstance(err, str):
            err = err.decode(encoding, errors="replace")

        if out and quiet != STDOUT and quiet != BOTH:
            self.to_log("out: %s" % out)
        if err and quiet != STDERR and quiet != BOTH:
            self.to_log("err: %s" % err)

        if ret:
            # bind the process results to the exception for configuration tests
            e = Errors.WafError(f"Command {cmd!r} returned {ret!r}")
            e.returncode = ret
            e.stderr = err
            e.stdout = out
            raise e

        if to_ret == BOTH:
            return (out, err)
        elif to_ret == STDERR:
            return err
        return out
Beispiel #5
0
    def recurse(self,
                dirs,
                name=None,
                mandatory=True,
                once=True,
                encoding=None):
        """
        Runs user-provided functions from the supplied list of directories.
        The directories can be either absolute, or relative to the directory
        of the wscript file.

        The methods :py:meth:`waflib.Context.Context.pre_recurse` and
        :py:meth:`waflib.Context.Context.post_recurse` are called immediately before
        and after a script has been executed.

        :param dirs: List of directories to visit
        :type dirs: list of string or space-separated string
        :param name: Name of function to invoke from the wscript
        :type  name: string
        :param mandatory: whether sub wscript files are required to exist
        :type  mandatory: bool
        :param once: read the script file once for a particular context
        :type once: bool
        :param encoding: encoding used to read the script files
        :type encoding: string
        """
        # per-context cache of scripts/functions already executed (for once=True)
        try:
            cache = self.recurse_cache
        except AttributeError:
            cache = self.recurse_cache = {}

        for d in Utils.to_list(dirs):

            if not os.path.isabs(d):
                # absolute paths only
                d = os.path.join(self.path.abspath(), d)

            WSCRIPT = os.path.join(d, WSCRIPT_FILE)
            # e.g. 'wscript_build': a file dedicated to this context's function
            WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)

            node = self.root.find_node(WSCRIPT_FUN)
            if node and (not once or node not in cache):
                # function-specific file: execute its whole contents directly
                cache[node] = True
                self.pre_recurse(node)
                try:
                    function_code = node.read('rU', encoding)
                    exec(compile(function_code, node.abspath(), 'exec'),
                         self.exec_dict)
                finally:
                    self.post_recurse(node)
            elif not node:
                # fall back to the plain wscript file and call the named function
                node = self.root.find_node(WSCRIPT)
                tup = (node, name or self.fun)
                if node and (not once or tup not in cache):
                    cache[tup] = True
                    self.pre_recurse(node)
                    try:
                        wscript_module = load_module(node.abspath(),
                                                     encoding=encoding)
                        user_function = getattr(wscript_module,
                                                (name or self.fun), None)
                        if not user_function:
                            if not mandatory:
                                continue
                            raise Errors.WafError(
                                'No function %r defined in %s' %
                                (name or self.fun, node.abspath()))
                        user_function(self)
                    finally:
                        self.post_recurse(node)
                elif not node:
                    # no wscript at all: only an error when the script is mandatory
                    if not mandatory:
                        continue
                    try:
                        os.listdir(d)
                    except OSError:
                        raise Errors.WafError('Cannot read the folder %r' % d)
                    raise Errors.WafError('No wscript file in directory %s' %
                                          d)
def multicheck(self, *k, **kw):
    """
    Use tuples to perform parallel configuration tests.

    :param k: one dict of arguments per configuration test to execute in parallel
    :param kw: keyword arguments forwarded to ``start_msg``/``end_msg``
        (notably ``msg``, ``errmsg`` and ``fatalmsg``)
    :raises Errors.WafError: when a test raises an error; details are in config.log
    """
    self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)),
                   **kw)

    class par(object):
        # minimal stand-in for a build context: just enough state for Runner.Parallel
        def __init__(self):
            self.keep = False
            self.returned_tasks = []
            self.task_sigs = {}
            self.progress_bar = 0

        def total(self):
            return len(tasks)

        def to_log(self, *k, **kw):
            return

    bld = par()
    tasks = []
    for dct in k:
        x = cfgtask(bld=bld)
        tasks.append(x)
        x.args = dct  # fixed: the original assigned x.args = dct twice
        x.bld = bld
        x.conf = self

        # bind a logger that will keep the info in memory
        x.logger = Logs.make_mem_logger(str(id(x)), self.logger)

    def it():
        # feed all tasks in one batch, then yield empty batches until completion
        yield tasks
        while 1:
            yield []

    p = Runner.Parallel(bld, Options.options.jobs)
    p.biter = it()
    p.start()

    # flush the logs in order into the config.log
    for x in tasks:
        x.logger.memhandler.flush()

    if p.error:
        for x in p.error:
            if getattr(x, 'err_msg', None):
                self.to_log(x.err_msg)
                self.end_msg('fail', color='RED')
                raise Errors.WafError(
                    'There is an error in the library, read config.log for more information'
                )

    for x in tasks:
        if x.hasrun != Task.SUCCESS:
            self.end_msg(kw.get('errmsg', 'no'), color='YELLOW', **kw)
            self.fatal(
                kw.get('fatalmsg', None) or
                'One of the tests has failed, read config.log for more information'
            )

    self.end_msg('ok', **kw)
Beispiel #7
0
 def __copy__(self):
     """Forbid copying: a build context holds unique state that must not be duplicated."""
     raise Errors.WafError('build contexts cannot be copied')
Beispiel #8
0
 def _alias_not_enabled_callback(alias_key, roles):
     """Report a 3rd party alias that exists but is disabled because none of its roles are enabled."""
     raise Errors.WafError(
         "3rd Party alias '{}' specified in {} is not enabled. Make sure that at least one of the "
         "following roles is enabled: [{}]".format(alias_key, file, ', '.join(roles)))
Beispiel #9
0
    def _process_uber_dict(uber_section, uber_dict):
        """
        Process each uber dictionary value: expand glob patterns, resolve
        aliased/engine-relative paths, and drop missing or duplicate files.

        :param uber_section: name of the uber section (used in error messages)
        :param uber_dict: mapping of filter name to a list of file entries
        :return: the processed dictionary with empty filters removed
        """
        processed_uber_dict = {}

        for filter_name, filter_contents in uber_dict.items():
            for filter_content in filter_contents:

                if isinstance(filter_content, str):

                    if '*' in filter_content or '?' in filter_content:
                        # If this is a raw glob pattern, stuff it into the expected glob dictionary
                        _process_glob_entry(dict(pattern=filter_content),
                                            filter_name, processed_uber_dict)
                    elif filter_content.startswith('@ENGINE@'):
                        # engine-relative entry: resolve against the engine path
                        file_path = os.path.normpath(
                            filter_content.replace('@ENGINE@',
                                                   bld.engine_path))
                        if not os.path.exists(file_path):
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' does not exist.  It will be ignored"
                                .format(file_path, waf_file_node_abs))
                        else:
                            if filter_name not in processed_uber_dict:
                                processed_uber_dict[filter_name] = []
                            # keep the aliased form in the result; track duplicates
                            # by the resolved absolute path
                            processed_uber_dict[filter_name].append(
                                filter_content)
                            dup_set.add(file_path)
                    else:
                        # This is a straight up file reference.
                        # Do any processing on an aliased reference
                        if filter_content.startswith('@'):
                            processed_path = bld.PreprocessFilePath(
                                filter_content, _invalid_alias_callback,
                                _alias_not_enabled_callback)
                        else:
                            processed_path = os.path.normpath(
                                os.path.join(base_path_abs, filter_content))

                        if not os.path.exists(processed_path):
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' does not exist.  It will be ignored"
                                .format(processed_path, waf_file_node_abs))
                        elif not os.path.isfile(processed_path):
                            Logs.warn(
                                "[WARN] Path '{}' specified in '{}' is a folder, only files or glob patterns are "
                                "allowed.  It will be ignored".format(
                                    processed_path, waf_file_node_abs))
                        elif processed_path in dup_set:
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' is a duplicate.  It will be ignored"
                                .format(processed_path, waf_file_node_abs))
                        else:
                            if filter_name not in processed_uber_dict:
                                processed_uber_dict[filter_name] = []
                            processed_uber_dict[filter_name].append(
                                processed_path)
                            dup_set.add(processed_path)

                elif isinstance(filter_content, dict):
                    # Dictionaries automatically go through the glob pattern working
                    _process_glob_entry(filter_content, filter_name,
                                        processed_uber_dict)
                else:
                    raise Errors.WafError(
                        "Invalid entry '{}' in file '{}', section '{}/{}'".
                        format(filter_content, file, uber_section,
                               filter_name))

        return _clear_empty_uber_dict(processed_uber_dict)
Beispiel #10
0
def read_file_list(bld, file):
    """
    Read and process a file list file (.waf_file) and manage duplicate files and possible globbing patterns to prepare
    the list for injestion by the project

    :param bld:     The build context
    :param file:    The .waf_file file list to process
    :return:        The processed list file
    """

    if not os.path.isfile(os.path.join(bld.path.abspath(), file)):
        raise Errors.WafError(
            "Invalid waf file list file: {}.  File not found.".format(file))

    def _invalid_alias_callback(alias_key):
        # error callback: the alias token is not recognized at all
        error_message = "Invalid alias '{}' specified in {}".format(
            alias_key, file)
        raise Errors.WafError(error_message)

    def _alias_not_enabled_callback(alias_key, roles):
        # error callback: the alias exists but none of its roles are enabled
        error_message = "3rd Party alias '{}' specified in {} is not enabled. Make sure that at least one of the " \
                        "following roles is enabled: [{}]".format(alias_key, file, ', '.join(roles))
        raise Errors.WafError(error_message)

    # Manage duplicate files and glob hits
    dup_set = set()
    glob_hits = 0  # NOTE(review): never incremented or read below -- dead state

    waf_file_node = bld.path.make_node(file)
    waf_file_node_abs = waf_file_node.abspath()
    base_path_abs = waf_file_node.parent.abspath()

    # NOTE(review): largely redundant with the os.path.isfile check above
    if not os.path.exists(waf_file_node_abs):
        raise Errors.WafError(
            'Invalid WAF file list: {}'.format(waf_file_node_abs))

    def _determine_vs_filter(input_rel_folder_path, input_filter_name,
                             input_filter_pattern):
        """
        Calculate the vs filter based on the resulting relative path, the input filter name,
        and the pattern used to derive the input relative path
        """
        vs_filter = input_filter_name
        if len(input_rel_folder_path) > 0:
            # If the resulting relative path has a subfolder, the base the filter on the following conditions
            if input_filter_name.lower() == 'root':
                # This is the root folder, use the relative folder subpath as the filter
                vs_filter = input_rel_folder_path
            else:
                # This is a named filter, the filter will place all results under this filter
                pattern_dirname = os.path.dirname(input_filter_pattern)
                if len(pattern_dirname) > 0:
                    if input_rel_folder_path != pattern_dirname:
                        # Strip out the base of the filter name
                        vs_filter = input_filter_name + '/' + input_rel_folder_path.replace(
                            pattern_dirname, '')
                    else:
                        vs_filter = input_filter_name
                else:
                    vs_filter = input_filter_name + '/' + input_rel_folder_path

        return vs_filter

    def _process_glob_entry(glob_content, filter_name, current_uber_dict):
        """
        Process a glob content from the input file list
        """
        if 'pattern' not in glob_content:
            raise Errors.WafError(
                'Missing keyword "pattern" from the glob entry"')

        original_pattern = glob_content.pop('pattern').replace('\\', '/')
        if original_pattern.startswith('@'):

            # patterns may be anchored to an alias such as @ENGINE@ or a 3rd party root
            ALIAS_PATTERN = re.compile('@.*@')
            alias_match = ALIAS_PATTERN.search(original_pattern)
            if alias_match:
                alias = alias_match.group(0)[1:-1]
                pattern = original_pattern[len(alias) + 2:]
                if alias == 'ENGINE':
                    search_node = bld.path
                else:
                    search_node = bld.root.make_node(bld.ThirdPartyPath(alias))
            else:
                pattern = original_pattern
                search_node = waf_file_node.parent
        else:
            pattern = original_pattern
            search_node = waf_file_node.parent

        # '../' prefixes move the search root up instead of being part of the glob
        while pattern.startswith('../'):
            pattern = pattern[3:]
            search_node = search_node.parent

        glob_results = search_node.ant_glob(pattern, **glob_content)

        for globbed_file in glob_results:

            rel_path = globbed_file.path_from(waf_file_node.parent).replace(
                '\\', '/')
            abs_path = globbed_file.abspath().replace('\\', '/')
            rel_folder_path = os.path.dirname(rel_path)

            vs_filter = _determine_vs_filter(rel_folder_path, filter_name,
                                             original_pattern)

            if vs_filter not in current_uber_dict:
                current_uber_dict[vs_filter] = []
            if abs_path in dup_set:
                Logs.warn(
                    "[WARN] File '{}' specified by the pattern '{}' in waf file '{}' is a duplicate.  It will be ignored"
                    .format(abs_path, original_pattern, waf_file_node_abs))
            else:
                current_uber_dict[vs_filter].append(rel_path)
                dup_set.add(abs_path)

    def _clear_empty_uber_dict(current_uber_dict):
        """
        Perform house clean in case glob pattern overrides move all files out of a 'root' group.
        """
        empty_filters = []
        for filter_name, filter_contents in current_uber_dict.items():
            if len(filter_contents) == 0:
                empty_filters.append(filter_name)
        for empty_filter in empty_filters:
            current_uber_dict.pop(empty_filter)
        return current_uber_dict

    def _process_uber_dict(uber_section, uber_dict):
        """
        Process each uber dictionary value: expand globs, resolve aliases,
        and drop missing or duplicate files.
        """
        processed_uber_dict = {}

        for filter_name, filter_contents in uber_dict.items():
            for filter_content in filter_contents:

                if isinstance(filter_content, str):

                    if '*' in filter_content or '?' in filter_content:
                        # If this is a raw glob pattern, stuff it into the expected glob dictionary
                        _process_glob_entry(dict(pattern=filter_content),
                                            filter_name, processed_uber_dict)
                    elif filter_content.startswith('@ENGINE@'):
                        file_path = os.path.normpath(
                            filter_content.replace('@ENGINE@',
                                                   bld.engine_path))
                        if not os.path.exists(file_path):
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' does not exist.  It will be ignored"
                                .format(file_path, waf_file_node_abs))
                        else:
                            if filter_name not in processed_uber_dict:
                                processed_uber_dict[filter_name] = []
                            # keep the aliased form in the result; track duplicates
                            # by the resolved absolute path
                            processed_uber_dict[filter_name].append(
                                filter_content)
                            dup_set.add(file_path)
                    else:
                        # This is a straight up file reference.
                        # Do any processing on an aliased reference
                        if filter_content.startswith('@'):
                            processed_path = bld.PreprocessFilePath(
                                filter_content, _invalid_alias_callback,
                                _alias_not_enabled_callback)
                        else:
                            processed_path = os.path.normpath(
                                os.path.join(base_path_abs, filter_content))

                        if not os.path.exists(processed_path):
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' does not exist.  It will be ignored"
                                .format(processed_path, waf_file_node_abs))
                        elif not os.path.isfile(processed_path):
                            Logs.warn(
                                "[WARN] Path '{}' specified in '{}' is a folder, only files or glob patterns are "
                                "allowed.  It will be ignored".format(
                                    processed_path, waf_file_node_abs))
                        elif processed_path in dup_set:
                            Logs.warn(
                                "[WARN] File '{}' specified in '{}' is a duplicate.  It will be ignored"
                                .format(processed_path, waf_file_node_abs))
                        else:
                            if filter_name not in processed_uber_dict:
                                processed_uber_dict[filter_name] = []
                            processed_uber_dict[filter_name].append(
                                processed_path)
                            dup_set.add(processed_path)

                elif isinstance(filter_content, dict):
                    # Dictionaries automatically go through the glob pattern working
                    _process_glob_entry(filter_content, filter_name,
                                        processed_uber_dict)
                else:
                    raise Errors.WafError(
                        "Invalid entry '{}' in file '{}', section '{}/{}'".
                        format(filter_content, file, uber_section,
                               filter_name))

        return _clear_empty_uber_dict(processed_uber_dict)

    def _get_cached_file_list():
        """
        Calculate the location of the cached waf_files path.
        NOTE(review): defined but never called below -- possibly leftover code.
        """
        bintemp_path = os.path.join(bld.srcnode.abspath(), BINTEMP_FOLDER)
        src_relative_path = file_node.path_from(bld.srcnode)
        cached_waf_files_abs_path = os.path.join(bintemp_path,
                                                 src_relative_path)
        return cached_waf_files_abs_path

    file_node = bld.path.make_node(file)

    # Read the source waf_file list
    source_file_list = bld.parse_json_file(file_node)

    # Prepare a processed waf_file list
    processed_file_list = {}

    for uber_file_entry, uber_file_dict in source_file_list.items():
        processed_file_list[uber_file_entry] = _process_uber_dict(
            uber_file_entry, uber_file_dict)
        pass  # NOTE(review): redundant statement

    return processed_file_list
Beispiel #11
0
 def _invalid_alias_callback(alias_key):
     """Report an alias token that is not recognized by the build system."""
     raise Errors.WafError(
         "Invalid alias '{}' specified in {}".format(alias_key, file))
Beispiel #12
0
    def add_moc_tasks(self):
        """
        Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``.

        For every ``*.moc`` entry found in the raw dependencies, locate the
        header or source file it was generated from and schedule a moc task
        to run before this task.

        :raises Errors.WafError: when no source file can be found for a moc entry
        """
        node = self.inputs[0]
        bld = self.generator.bld

        try:
            # compute the signature once to know if there is a moc file to create
            self.signature()
        except KeyError:
            # the moc file may be referenced somewhere else
            pass
        else:
            # remove the signature, it must be recomputed with the moc task
            delattr(self, "cache_sig")

        # search order: the input's own directory first, then the include paths
        include_nodes = [node.parent] + self.generator.includes_nodes

        moctasks = []
        mocfiles = set()
        for d in bld.raw_deps.get(self.uid(), []):
            if not d.endswith(".moc"):
                continue

            # process that base.moc only once
            if d in mocfiles:
                continue
            mocfiles.add(d)

            # find the source associated with the moc file
            h_node = None

            # strip the '.moc' extension to get the base name
            base2 = d[:-4]
            for x in include_nodes:
                for e in self.moc_h_ext():
                    h_node = x.find_node(base2 + e)
                    if h_node:
                        break
                if h_node:
                    m_node = h_node.change_ext(".moc")
                    break
            else:
                # foo.cpp -> foo.cpp.moc
                for k in EXT_QT4:
                    if base2.endswith(k):
                        for x in include_nodes:
                            h_node = x.find_node(base2)
                            if h_node:
                                break
                        if h_node:
                            m_node = h_node.change_ext(k + ".moc")
                            break

            if not h_node:
                raise Errors.WafError(
                    "No source found for %r which is a moc file" % d)

            # create the moc task
            task = self.create_moc_task(h_node, m_node)
            moctasks.append(task)

        # simple scheduler dependency: run the moc task before others
        self.run_after.update(set(moctasks))
        self.moc_done = 1
Beispiel #13
0
 def __copy__(self):
     """Implemented to prevent copies of build contexts (raises an exception)."""
     raise Errors.WafError('build contexts are not supposed to be copied')
Beispiel #14
0
				kw['cwd']=kw['cwd'].abspath()
		encoding=kw.pop('decode_as',default_encoding)
		try:
			ret,out,err=Utils.run_process(cmd,kw,cargs)
		except Exception ,e:
			raise Errors.WafError('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]
		if not isinstance(out,str):
			out=out.decode(encoding,errors='replace')
		if not isinstance(err,str):
			err=err.decode(encoding,errors='replace')
		if out and quiet!=STDOUT and quiet!=BOTH:
			self.to_log('out: %s'%out)
		if err and quiet!=STDERR and quiet!=BOTH:
			self.to_log('err: %s'%err)
		if ret:
			e=Errors.WafError('Command %r returned %r'%(cmd,ret))
			e.returncode=ret
			e.stderr=err
			e.stdout=out
			raise e
		if to_ret==BOTH:
			return(out,err)
		elif to_ret==STDERR:
			return err
		return out
	def fatal(self,msg,ex=None):
		if self.logger:
			self.logger.info('from %s: %s'%(self.path.abspath(),msg))
		try:
			logfile=self.logger.handlers[0].baseFilename
		except AttributeError:
Beispiel #15
0
def process_use(self):
    """
    Process the ``use`` attribute which contains a list of task generator names::

            def build(bld):
                    bld.shlib(source='a.c', target='lib1')
                    bld.program(source='main.c', target='app', use='lib1')

    See :py:func:`waflib.Tools.ccroot.use_rec`.
    """

    # working sets filled in by use_rec():
    #   tmp_use_not  - names excluded from processing
    #   tmp_use_seen - names in discovery order (ordered-set substitute)
    #   tmp_use_prec - precedence constraints between names
    use_not = self.tmp_use_not = set()
    self.tmp_use_seen = []  # we would like an ordered set
    use_prec = self.tmp_use_prec = {}
    self.uselib = self.to_list(getattr(self, "uselib", []))
    self.includes = self.to_list(getattr(self, "includes", []))
    names = self.to_list(getattr(self, "use", []))

    # recursively expand the dependency names
    for x in names:
        self.use_rec(x)

    # drop excluded names from the precedence table
    for x in use_not:
        if x in use_prec:
            del use_prec[x]

    # topological sort
    out = self.tmp_use_sorted = []
    tmp = []
    # seed with the names that appear in no constraint's successor list
    for x in self.tmp_use_seen:
        for k in use_prec.values():
            if x in k:
                break
        else:
            tmp.append(x)

    while tmp:
        e = tmp.pop()
        out.append(e)
        try:
            nlst = use_prec[e]
        except KeyError:
            pass
        else:
            del use_prec[e]
            for x in nlst:
                # release x once no remaining constraint mentions it
                for y in use_prec:
                    if x in use_prec[y]:
                        break
                else:
                    tmp.append(x)
    # anything left in use_prec is part of a dependency cycle
    if use_prec:
        raise Errors.WafError("Cycle detected in the use processing %r" %
                              use_prec)
    out.reverse()

    link_task = getattr(self, "link_task", None)
    for x in out:
        y = self.bld.get_tgen_by_name(x)
        var = y.tmp_use_var
        if var and link_task:
            # a library target: add it to the link line and record its path
            if var == "LIB" or y.tmp_use_stlib or x in names:
                self.env.append_value(var,
                                      [y.target[y.target.rfind(os.sep) + 1:]])
                self.link_task.dep_nodes.extend(y.link_task.outputs)
                tmp_path = y.link_task.outputs[0].parent.path_from(
                    self.get_cwd())
                self.env.append_unique(var + "PATH", [tmp_path])
        else:
            # object-only task generator: reuse its object files directly
            if y.tmp_use_objects:
                self.add_objects_from_tgen(y)

        if getattr(y, "export_includes", None):
            # self.includes may come from a global variable #2035
            self.includes = self.includes + y.to_incnodes(y.export_includes)

        if getattr(y, "export_defines", None):
            self.env.append_value("DEFINES", self.to_list(y.export_defines))

    # and finally, add the use variables (no recursion needed)
    for x in names:
        try:
            y = self.bld.get_tgen_by_name(x)
        except Errors.WafError:
            # not a task generator: treat the name as a uselib variable
            if not self.env["STLIB_" + x] and not x in self.uselib:
                self.uselib.append(x)
        else:
            for k in self.to_list(getattr(y, "use", [])):
                if not self.env["STLIB_" + k] and not k in self.uselib:
                    self.uselib.append(k)
    def post(self):
        """
		Create task objects. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`

		:returns: True when the methods were executed, False when already posted
		"""

        # we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
        if getattr(self, 'posted', None):
            #error("OBJECT ALREADY POSTED" + str( self))
            return False
        self.posted = True

        # methods bound explicitly on this task generator
        keys = set(self.meths)

        # add the methods listed in the features
        self.features = Utils.to_list(self.features)
        for x in self.features + ['*']:
            st = feats[x]
            if not st:
                # an unbound feature name is accepted when a task class of
                # the same name exists
                if not x in Task.classes:
                    Logs.warn(
                        'feature %r does not exist - bind at least one method to it'
                        % x)
            keys.update(list(st))  # ironpython 2.7 wants the cast to list

        # copy the precedence table (only constraints between selected methods)
        prec = {}
        prec_tbl = self.prec or task_gen.prec
        for x in prec_tbl:
            if x in keys:
                prec[x] = prec_tbl[x]

        # elements disconnected
        tmp = []
        for a in keys:
            for x in prec.values():
                if a in x: break
            else:
                tmp.append(a)

        # TODO waf 1.7
        #tmp.sort()

        # topological sort
        out = []
        while tmp:
            e = tmp.pop()
            if e in keys: out.append(e)
            try:
                nlst = prec[e]
            except KeyError:
                pass
            else:
                del prec[e]
                for x in nlst:
                    # release x once no remaining constraint mentions it
                    for y in prec:
                        if x in prec[y]:
                            break
                    else:
                        tmp.append(x)

        # anything left in prec is part of a constraint cycle
        if prec:
            raise Errors.WafError('Cycle detected in the method execution %r' %
                                  prec)
        out.reverse()
        self.meths = out

        # then we run the methods in order
        Logs.debug('task_gen: posting %s %d' % (self, id(self)))
        for x in out:
            try:
                v = getattr(self, x)
            except AttributeError:
                raise Errors.WafError(
                    '%r is not a valid task generator method' % x)
            Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
            v()

        Logs.debug('task_gen: posted %s' % self.name)
        return True
Beispiel #17
0
def apply_vnum(self):
    """
    Enforce version numbering on shared libraries. The valid version numbers must have either zero or two dots::

            def build(bld):
                    bld.shlib(source='a.c', target='foo', vnum='14.15.16')

    In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:

    * ``libfoo.so    → libfoo.so.14.15.16``
    * ``libfoo.so.14 → libfoo.so.14.15.16``

    By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library.  When necessary, the compatibility can be explicitly defined using `cnum` parameter:

            def build(bld):
                    bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')

    In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between path releases for a specific major and minor version of the library.

    On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library.
    """
    # only versioned on POSIX systems producing ELF or Mach-O binaries
    if (not getattr(self, "vnum", "") or os.name != "posix"
            or self.env.DEST_BINFMT not in ("elf", "mac-o")):
        return

    link = self.link_task
    if not re_vnum.match(self.vnum):
        raise Errors.WafError("Invalid vnum {!r} for target {!r}".format(
            self.vnum, getattr(self, "name", self)))
    nums = self.vnum.split(".")
    node = link.outputs[0]

    # compatibility version defaults to the major number and must be a
    # prefix of vnum (e.g. cnum='14.15' for vnum='14.15.16')
    cnum = getattr(self, "cnum", str(nums[0]))
    cnums = cnum.split(".")
    if len(cnums) > len(nums) or nums[0:len(cnums)] != cnums:
        raise Errors.WafError("invalid compatibility version %s" % cnum)

    # name3: fully-versioned file name, name2: compatibility (SONAME) name
    libname = node.name
    if libname.endswith(".dylib"):
        name3 = libname.replace(".dylib", ".%s.dylib" % self.vnum)
        name2 = libname.replace(".dylib", ".%s.dylib" % cnum)
    else:
        name3 = libname + "." + self.vnum
        name2 = libname + "." + cnum

    # add the so name for the ld linker - to disable, just unset env.SONAME_ST
    if self.env.SONAME_ST:
        v = self.env.SONAME_ST % name2
        self.env.append_value("LINKFLAGS", v.split())

    # the following task is just to enable execution from the build dir :-/
    if self.env.DEST_OS != "openbsd":
        outs = [node.parent.make_node(name3)]
        if name2 != name3:
            outs.append(node.parent.make_node(name2))
        self.create_task("vnum", node, outs)

    if getattr(self, "install_task", None):
        # replace the plain install with the versioned file plus symlinks
        self.install_task.hasrun = Task.SKIPPED
        path = self.install_task.install_to
        if self.env.DEST_OS == "openbsd":
            # OpenBSD: install under the unversioned name only
            libname = self.link_task.outputs[0].name
            t1 = self.add_install_as(install_to=f"{path}/{libname}",
                                     install_from=node,
                                     chmod=self.link_task.chmod)
            self.vnum_install_task = (t1, )
        else:
            t1 = self.add_install_as(install_to=path + os.sep + name3,
                                     install_from=node,
                                     chmod=self.link_task.chmod)
            t3 = self.add_symlink_as(install_to=path + os.sep + libname,
                                     install_from=name3)
            if name2 != name3:
                t2 = self.add_symlink_as(install_to=path + os.sep + name2,
                                         install_from=name3)
                self.vnum_install_task = (t1, t2, t3)
            else:
                self.vnum_install_task = (t1, t3)

    if "-dynamiclib" in self.env.LINKFLAGS:
        # this requires after(propagate_uselib_vars)
        try:
            inst_to = self.install_path
        except AttributeError:
            inst_to = self.link_task.inst_to
        if inst_to:
            # macOS: encode install-name and version info into the binary
            p = Utils.subst_vars(inst_to, self.env)
            path = os.path.join(p, name2)
            self.env.append_value("LINKFLAGS", ["-install_name", path])
            self.env.append_value("LINKFLAGS",
                                  "-Wl,-compatibility_version,%s" % cnum)
            self.env.append_value("LINKFLAGS",
                                  "-Wl,-current_version,%s" % self.vnum)
Beispiel #18
0
 def __copy__(self):
     """Raise on any attempt to copy; node objects are meant to be unique."""
     raise Errors.WafError('nodes are not supposed to be copied')
Beispiel #19
0
def SAMBA_LIBRARY(bld, libname, source,
                  deps='',
                  public_deps='',
                  includes='',
                  public_headers=None,
                  public_headers_install=True,
                  private_headers=None,
                  header_path=None,
                  pc_files=None,
                  vnum=None,
                  soname=None,
                  cflags='',
                  cflags_end=None,
                  ldflags='',
                  external_library=False,
                  realname=None,
                  keep_underscore=False,
                  autoproto=None,
                  autoproto_extra_source='',
                  group='main',
                  depends_on='',
                  local_include=True,
                  global_include=True,
                  vars=None,
                  subdir=None,
                  install_path=None,
                  install=True,
                  pyembed=False,
                  pyext=False,
                  target_type='LIBRARY',
                  bundled_extension=False,
                  bundled_name=None,
                  link_name=None,
                  abi_directory=None,
                  abi_match=None,
                  hide_symbols=False,
                  manpages=None,
                  private_library=False,
                  grouping_library=False,
                  allow_undefined_symbols=False,
                  allow_warnings=False,
                  enabled=True):
    '''define a Samba library

    Creates an object-list subsystem for the sources plus a shared-library
    target that links it, and handles private/public naming, version
    scripts, pkg-config files and manpages.
    '''

    # a private library must not expose headers to consumers
    if private_library and public_headers:
        raise Errors.WafError("private library '%s' must not have public header files" %
                             libname)

    # the build configuration may force a library to be private
    if LIB_MUST_BE_PRIVATE(bld, libname):
        private_library = True

    if not enabled:
        SET_TARGET_TYPE(bld, libname, 'DISABLED')
        return

    source = bld.EXPAND_VARIABLES(source, vars=vars)
    if subdir:
        source = bld.SUBDIR(subdir, source)

    # remember empty libraries, so we can strip the dependencies
    if ((source == '') or (source == [])):
        if deps == '' and public_deps == '':
            SET_TARGET_TYPE(bld, libname, 'EMPTY')
            return
        # dependencies but no sources: synthesize an empty C file to link
        empty_c = libname + '.empty.c'
        bld.SAMBA_GENERATOR('%s_empty_c' % libname,
                            rule=generate_empty_file,
                            target=empty_c)
        source=empty_c

    if BUILTIN_LIBRARY(bld, libname):
        obj_target = libname
    else:
        obj_target = libname + '.objlist'

    if group == 'libraries':
        subsystem_group = 'main'
    else:
        subsystem_group = group

    # first create a target for building the object files for this library
    # by separating in this way, we avoid recompiling the C files
    # separately for the install library and the build library
    bld.SAMBA_SUBSYSTEM(obj_target,
                        source         = source,
                        deps           = deps,
                        public_deps    = public_deps,
                        includes       = includes,
                        public_headers = public_headers,
                        public_headers_install = public_headers_install,
                        private_headers= private_headers,
                        header_path    = header_path,
                        cflags         = cflags,
                        cflags_end     = cflags_end,
                        group          = subsystem_group,
                        autoproto      = autoproto,
                        autoproto_extra_source=autoproto_extra_source,
                        depends_on     = depends_on,
                        hide_symbols   = hide_symbols,
                        allow_warnings = allow_warnings,
                        pyembed        = pyembed,
                        pyext          = pyext,
                        local_include  = local_include,
                        global_include = global_include)

    if BUILTIN_LIBRARY(bld, libname):
        return

    if not SET_TARGET_TYPE(bld, libname, target_type):
        return

    # the library itself will depend on that object target
    deps += ' ' + public_deps
    deps = TO_LIST(deps)
    deps.append(obj_target)

    realname = bld.map_shlib_extension(realname, python=(target_type=='PYTHON'))
    link_name = bld.map_shlib_extension(link_name, python=(target_type=='PYTHON'))

    # we don't want any public libraries without version numbers
    if (not private_library and target_type != 'PYTHON' and not realname):
        if vnum is None and soname is None:
            raise Errors.WafError("public library '%s' must have a vnum" %
                    libname)
        if pc_files is None:
            raise Errors.WafError("public library '%s' must have pkg-config file" %
                       libname)
        if public_headers is None:
            raise Errors.WafError("public library '%s' must have header files" %
                       libname)

    if private_library and not vnum:
        vnum = '0'

    # choose the on-disk library name
    if bundled_name is not None:
        pass
    elif target_type == 'PYTHON' or realname or not private_library:
        if keep_underscore:
            bundled_name = libname
        else:
            bundled_name = libname.replace('_', '-')
    else:
        assert (private_library == True and realname is None)
        if abi_directory or vnum or soname:
            bundled_extension=True
        bundled_name = PRIVATE_NAME(bld, libname.replace('_', '-'),
                                    bundled_extension, private_library)

    ldflags = TO_LIST(ldflags)
    if bld.env['ENABLE_RELRO'] is True:
        ldflags.extend(TO_LIST('-Wl,-z,relro,-z,now'))

    features = 'c cshlib symlink_lib install_lib'
    if pyext:
        features += ' pyext'
    if pyembed:
        features += ' pyembed'

    if abi_directory:
        features += ' abi_check'

    if pyembed and bld.env['PYTHON_SO_ABI_FLAG']:
        # For ABI checking, we don't care about the Python version.
        # Remove the Python ABI tag (e.g. ".cpython-35m")
        abi_flag = bld.env['PYTHON_SO_ABI_FLAG']
        replacement = ''
        version_libname = libname.replace(abi_flag, replacement)
    else:
        version_libname = libname

    # optional linker version script (symbol versioning / ABI control)
    vscript = None
    if bld.env.HAVE_LD_VERSION_SCRIPT:
        if private_library:
            version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION)
        elif vnum:
            version = "%s_%s" % (libname, vnum)
        else:
            version = None
        if version:
            vscript = "%s.vscript" % libname
            bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript,
                            abi_match)
            fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN)
            fullpath = bld.path.find_or_declare(fullname)
            vscriptpath = bld.path.find_or_declare(vscript)
            if not fullpath:
                raise Errors.WafError("unable to find fullpath for %s" % fullname)
            if not vscriptpath:
                raise Errors.WafError("unable to find vscript path for %s" % vscript)
            bld.add_manual_dependency(fullpath, vscriptpath)
            if bld.is_install:
                # also make the .inst file depend on the vscript
                instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN)
                bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript))
            # NOTE(review): abspath() is called with an env argument
            # (waf 1.5-style Node API) — confirm against the waf version in use
            vscript = os.path.join(bld.path.abspath(bld.env), vscript)

    # create the actual library task generator
    bld.SET_BUILD_GROUP(group)
    t = bld(
        features        = features,
        source          = [],
        target          = bundled_name,
        depends_on      = depends_on,
        samba_ldflags   = ldflags,
        samba_deps      = deps,
        samba_includes  = includes,
        version_script  = vscript,
        version_libname = version_libname,
        local_include   = local_include,
        global_include  = global_include,
        vnum            = vnum,
        soname          = soname,
        install_path    = None,
        samba_inst_path = install_path,
        name            = libname,
        samba_realname  = realname,
        samba_install   = install,
        abi_directory   = "%s/%s" % (bld.path.abspath(), abi_directory),  # yields '<path>/None' when unset; presumably only read when the abi_check feature is active — TODO confirm
        abi_match       = abi_match,
        private_library = private_library,
        grouping_library=grouping_library,
        allow_undefined_symbols=allow_undefined_symbols
        )

    if realname and not link_name:
        link_name = 'shared/%s' % realname

    if link_name:
        if 'waflib.extras.compat15' in sys.modules:
            link_name = 'default/' + link_name
        t.link_name = link_name

    if pc_files is not None and not private_library:
        if pyembed:
            bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG'])
        else:
            bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)

    if (manpages is not None and 'XSLTPROC_MANPAGES' in bld.env and
        bld.env['XSLTPROC_MANPAGES']):
        bld.MANPAGES(manpages, install)
Beispiel #20
0
def ASSERT(ctx, expression, msg):
    '''Abort the build with *msg* when *expression* is falsy.'''
    if expression:
        return
    raise Errors.WafError("ERROR: %s\n" % msg)
    def exec_command(self, cmd, **kw):
        """
        Runs an external process and returns the exit status::

                def run(tsk):
                        ret = tsk.generator.bld.exec_command('touch foo.txt')
                        return ret

        If the context has the attribute 'log', then captures and logs the process stderr/stdout.
        Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
        stdout/stderr values captured.

        :param cmd: command argument for subprocess.Popen
        :type cmd: string or list
        :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
        :type kw: dict
        :returns: process exit status
        :rtype: integer
        :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
        :raises: :py:class:`waflib.Errors.WafError` in case of execution failure
        """
        subprocess = Utils.subprocess
        # a plain string command runs through the shell
        kw["shell"] = isinstance(cmd, str)
        self.log_command(cmd, kw)

        if self.logger:
            self.logger.info(cmd)

        # capture both streams unless the caller overrode them
        if "stdout" not in kw:
            kw["stdout"] = subprocess.PIPE
        if "stderr" not in kw:
            kw["stderr"] = subprocess.PIPE

        if Logs.verbose and not kw["shell"] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError("Program %s not found!" % cmd[0])

        # cargs collects the parameters destined for communicate()/wait()
        # rather than for the Popen constructor
        cargs = {}
        if "timeout" in kw:
            # communicate() accepts 'timeout' from Python 3.3 onwards
            if sys.hexversion >= 0x3030000:
                cargs["timeout"] = kw["timeout"]
                if not "start_new_session" in kw:
                    # presumably so the whole process group can be signalled
                    # when the timeout fires — TODO confirm
                    kw["start_new_session"] = True
            del kw["timeout"]
        if "input" in kw:
            if kw["input"]:
                cargs["input"] = kw["input"]
                kw["stdin"] = subprocess.PIPE
            del kw["input"]

        if "cwd" in kw:
            # accept a waf Node object as the working directory
            if not isinstance(kw["cwd"], str):
                kw["cwd"] = kw["cwd"].abspath()

        encoding = kw.pop("decode_as", default_encoding)

        try:
            ret, out, err = Utils.run_process(cmd, kw, cargs)
        except Exception as e:
            raise Errors.WafError("Execution failure: %s" % str(e), ex=e)

        # decode and forward the captured output to the logger or the console
        if out:
            if not isinstance(out, str):
                out = out.decode(encoding, errors="replace")
            if self.logger:
                self.logger.debug("out: %s", out)
            else:
                Logs.info(out, extra={"stream": sys.stdout, "c1": ""})
        if err:
            if not isinstance(err, str):
                err = err.decode(encoding, errors="replace")
            if self.logger:
                self.logger.error("err: %s" % err)
            else:
                Logs.info(err, extra={"stream": sys.stderr, "c1": ""})

        return ret
Beispiel #22
0
    def build_exe(self, bld):
        """Create an NSIS Windows installer (.exe) for this package.

        Requires the MAKENSIS tool and 'appname'/'version' attributes;
        assembles the license text, expands the installer.nsi template via
        templater.Copier and schedules the makensis build rule.

        :raises Errors.WafError: when required attributes or license files
            are missing
        """
        if 'MAKENSIS' not in bld.env:
            Logs.error("makensis not installed. Can't complete")
            return
        for t in ('appname', 'version'):
            if not hasattr(self, t):
                raise Errors.WafError("Package '%r' needs '%s' attribute" %
                                      (self, t))
        thisdir = os.path.dirname(__file__)
        # ship everything not explicitly marked 'dontship'
        fonts = [x for x in self.fonts if not hasattr(x, 'dontship')]
        kbds = [x for x in self.keyboards if not hasattr(x, 'dontship')]
        if not hasattr(self, 'license'):
            if fonts and kbds:
                # make new file and copy OFL.txt and MIT.txt into it
                self.license = 'LICENSE'
                font_license = bld.bldnode.find_resource('OFL.txt')
                if not font_license:
                    raise Errors.WafError(
                        "The font license file OFL.txt doesn't exist so cannot build exe"
                    )
                kb_license = bld.bldnode.find_resource('MIT.txt')
                if not kb_license:
                    raise Errors.WafError(
                        "The keyboard license file MIT.txt doesn't exist so cannot build exe"
                    )
                # NOTE(review): handle opened without 'with' (leaks on write
                # error), and the loop variable 'tempfile' shadows the stdlib
                # module name — it is a waf Node here
                f = open("LICENSE", "w")
                for tempfile in kb_license, font_license:
                    f.write(tempfile.read())
                    f.write("\n")
                f.close()
            elif kbds:
                self.license = 'MIT.txt'
            else:
                self.license = 'OFL.txt'
        if self.license is not None and not bld.bldnode.find_resource(
                self.license):
            raise Errors.WafError("The license file " + self.license +
                                  " does not exist so cannot build exe.")

        # template expansion context for installer.nsi
        env = {
            'project': self,
            'basedir': thisdir,
            'fonts': fonts,
            'kbds': kbds,
            'env': bld.env
        }

        def blddir(base, val):
            # map a build-relative resource to an absolute path for the template
            x = bld.bldnode.find_resource(val)
            base = os.path.join(bld.srcnode.abspath(), base.package.reldir,
                                bld.bldnode.srcpath())
            return os.path.join(base, x.bldpath())

        # create a taskgen to expand the installer.nsi
        bname = 'installer_' + self.appname

        def procpkg(p, c):
            # accumulate fonts and keyboards from subpackages
            for k in p.keyboards:
                k.setup_vars(c)
            kbds.extend(p.keyboards)
            fonts.extend(p.fonts)

        self.subrun(bld, procpkg, onlyfn=True)
        task = templater.Copier(prj=self,
                                fonts=fonts,
                                kbds=kbds,
                                basedir=thisdir,
                                env=bld.env,
                                bld=blddir)
        task.set_inputs(
            bld.root.find_resource(
                self.exetemplate if hasattr(self, 'exetemplate') else os.path.
                join(thisdir, 'installer.nsi')))
        # every shipped file that exists becomes an input of the template task
        for d, x in self.get_files(bld):
            if not x: continue
            r = os.path.relpath(os.path.join(d, x), bld.bldnode.abspath())
            y = bld.bldnode.find_or_declare(r)
            if os.path.isfile(y and y.abspath()): task.set_inputs(y)

        task.set_outputs(bld.bldnode.find_or_declare(bname + '.nsi'))
        bld.add_to_group(task)
        # finally run makensis on the generated .nsi script
        bld(rule='${MAKENSIS} -V4 -O' + bname + '.log ${SRC}',
            source=bname + '.nsi',
            target='%s/%s-%s.exe' %
            ((self.outdir or '.'),
             (self.desc_name or self.appname.title()), self.version))
Beispiel #23
0
    def cmd_and_log(self, cmd, **kw):
        """
        Execute a command and return stdout if the execution is successful.
        An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
        will be bound to the WafError object::

                def configure(conf):
                        out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
                        (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
                        try:
                                conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
                        except Exception as e:
                                print(e.stdout, e.stderr)

        :param cmd: args for subprocess.Popen
        :param kw: keyword arguments for subprocess.Popen
        """
        proc_mod = Utils.subprocess
        # a plain string command runs through the shell
        kw['shell'] = isinstance(cmd, str)
        Logs.debug('runner: %r' % cmd)

        # 'quiet' and 'output' are our own parameters, not Popen's
        quiet = kw.pop('quiet', None)
        to_ret = kw.pop('output', STDOUT)

        if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
            raise Errors.WafError("Program %s not found!" % cmd[0])

        kw['stdout'] = kw['stderr'] = proc_mod.PIPE
        if quiet is None:
            self.to_log(cmd)

        try:
            proc = proc_mod.Popen(cmd, **kw)
            out, err = proc.communicate()
        except Exception as e:
            raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

        # normalize the captured streams to text
        if not isinstance(out, str):
            out = out.decode(sys.stdout.encoding or 'iso8859-1')
        if not isinstance(err, str):
            err = err.decode(sys.stdout.encoding or 'iso8859-1')

        if out and quiet not in (STDOUT, BOTH):
            self.to_log('out: %s' % out)
        if err and quiet not in (STDERR, BOTH):
            self.to_log('err: %s' % err)

        if proc.returncode:
            # enrich the error with the captured output for the caller
            failure = Errors.WafError('Command %r returned %r' %
                                      (cmd, proc.returncode))
            failure.returncode = proc.returncode
            failure.stderr = err
            failure.stdout = out
            raise failure

        if to_ret == BOTH:
            return (out, err)
        if to_ret == STDERR:
            return err
        return out
Beispiel #24
0
    def execute_build(self):
        # check we have all the info we need
        if os.path.exists('debian'):
            Logs.warn(
                "debian/ packaging folder already exists, did not generate new templates"
            )
            return
        Logs.warn("debian/ packaging folder templates generated")

        globalpackage = Package.packagestore[Package.globalpackage]
        srcname = getattr(globalpackage, 'debpkg', None)
        if not srcname:
            raise Errors.WafError(
                'No debpkg information given to default_package. E.g. set DEBPKG'
            )
        srcversion = getattr(globalpackage, 'version', None)
        if not srcversion:
            raise Errors.WafError(
                'No version information given to default_package. E.g. set VERSION'
            )
        maint = os.getenv('DEBFULLNAME') + ' <' + os.getenv('DEBEMAIL') + '>'
        if not maint:
            raise Errors.WafError(
                "I don't know who you are, please set the DEBFULLNAME and DEBEMAIL environment variables"
            )
        license = getattr(globalpackage, 'license', None)
        if not license:
            raise Errors.WafError(
                "default_package needs a license. E.g. set LICENSE")
        license = self.bldnode.find_resource(license)
        if not license:
            raise Errors.WafError(
                "The license file doesn't exist, perhaps you need to smith build first"
            )

        contact = getattr(globalpackage, 'contact', '')
        if not contact:
            Logs.warn("Optional contact information not provided.")

        url = getattr(globalpackage, 'url', '')
        if not url:
            Logs.warn("Optional upstream URL not provided.")

        # install and dirs files
        # NOTE(review): this span uses the Python 2 'file()' builtin and the
        # 0775 octal literal further down, so it only runs under Python 2.
        os.makedirs('debian/bin')
        # ship the build tool itself (this very script) inside the package
        shutil.copy(sys.argv[0], 'debian/bin')
        # bitmask: bit 1 -> some package installs fonts, bit 2 -> graphite seen
        hasfonts = 0
        haskbds = False  # NOTE(review): never updated or read in this span
        for p in Package.packages():
            pname = getattr(p, 'debpkg', None)
            if not pname: continue
            fdir = "/usr/share/fonts/opentype/" + pname + "\n"
            # NOTE(review): 'debian/dirs' and 'debian/install' are reopened in
            # 'w' (truncate) mode on every iteration, so only the last
            # package's entries survive -- confirm this is intended.
            fdirs = file(os.path.join('debian', 'dirs'), 'w')
            if len(p.fonts):
                fdirs.write(fdir)
                hasfonts = hasfonts | 1
            fdirs.close()
            finstall = file(os.path.join('debian', 'install'), 'w')
            for f in p.fonts:
                # one entry per font: <build-output-dir>/<target><TAB><dest dir>
                finstall.write(
                    getattr(Context.g_module, 'out', 'results') + "/" +
                    f.target + "\t" + fdir)
                if hasattr(f, 'graphite'): hasfonts = hasfonts | 2
            finstall.close()

        # source format
        os.makedirs('debian/source')
        fformat = file(os.path.join('debian', 'source', 'format'), 'w')
        fformat.write('''3.0 (quilt)''')
        fformat.close()

        # changelog
        fchange = file(os.path.join('debian', 'changelog'), 'w')
        fchange.write('''{0} ({1}-1) unstable; urgency=low

  * Release of ... under ... 
  * Describe your significant changes here (use dch to help you fill in the changelog automatically).

 -- {2}  {3} {4}
'''.format(srcname, srcversion, maint, time.strftime("%a, %d %b %Y %H:%M:%S"),
           formattz(time.altzone)))
        fchange.close()

        # copyright (needs http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ machine-readable format)
        shutil.copy(license.abspath(), os.path.join('debian', 'copyright'))

        # control  (needs Homepage: field)
        # build-deps follow the font technologies detected in the loop above
        bdeps = []
        if hasfonts & 1:
            bdeps.append('libfont-ttf-scripts-perl, python-palaso, fontforge')
        if hasfonts & 2:
            bdeps.append('grcompiler')
        if maint: maint = "\nMaintainer: " + maint
        fcontrol = file(os.path.join('debian', 'control'), 'w')
        fcontrol.write('''Source: {0}
Priority: optional
Section: fonts{1}
Build-Depends: debhelper (>= 9~), {2}
Standards-Version: 3.9.6
Homepage: {3}
X-contact: {4}

'''.format(srcname, maint, ", ".join(bdeps), url, contact))
        # one binary package stanza per package that declares a 'debpkg' name
        for p in Package.packages():
            pname = getattr(p, 'debpkg', None)
            if not pname: continue
            fcontrol.write('''Package: {0}
Section: fonts
Architecture: all
Multi-Arch: foreign
Depends: ${{misc:Depends}}
Description: {1}
{2}

'''.format(pname, p.desc_short, formatdesc(p.desc_long)))
        fcontrol.close()

        # other files
        fileinfo = {
            'rules': '''#!/usr/bin/make -f

SMITH=debian/bin/smith
%:
	dh $@

override_dh_auto_configure :
	${SMITH} configure

override_dh_auto_build :
	${SMITH} build

override_dh_auto_clean :
	${SMITH} distclean

override_dh_auto_test :

override_dh_auto_install :

override_dh_installchangelogs:
	dh_installchangelogs -k FONTLOG.txt

override_dh_builddeb:
	dh_builddeb -- -Zxz -Sextreme -z9
	#dh_builddeb -- -Zxz -z9
''',
            'compat': '9'
        }
        for k, v in fileinfo.items():
            f = file(os.path.join('debian', k), 'w')
            f.write(v + "\n")
            # debian/rules must be executable (0775 is Python 2 octal syntax)
            if k == 'rules': os.fchmod(f.fileno(), 0775)
            f.close()

        # docs file  (probably needs a conditional on web/ and sources/ too)
        fdocs = file(os.path.join('debian', 'doc'), 'w')
        fdocs.write('''*.txt
documentation/''')
        fdocs.close()

        # watch file
        fwatch = file(os.path.join('debian', 'watch'), 'w')
        fwatch.write(
            '''# access to the tarballs on the release server is not yet automated'''
        )
        fwatch.close()
Beispiel #25
0
    def exec_command(self, cmd, **kw):
        """
        Run an external process and return its exit status::

            def run(tsk):
                ret = tsk.generator.bld.exec_command('touch foo.txt')
                return ret

        Captured stdout/stderr is written to the context logger when one is
        set, or echoed to the console otherwise. Unlike
        :py:meth:`waflib.Context.Context.cmd_and_log`, the captured output is
        not returned to the caller.

        :param cmd: command for subprocess.Popen (a string implies shell mode)
        :type cmd: string or list
        :param kw: keyword arguments forwarded to subprocess.Popen; the
            'input' and 'timeout' entries are routed to communicate/wait
        :type kw: dict
        :returns: process exit status
        :rtype: integer
        :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
        :raises: :py:class:`waflib.Errors.WafError` in case of execution failure
        """
        subprocess = Utils.subprocess
        use_shell = isinstance(cmd, str)
        kw['shell'] = use_shell
        self.log_command(cmd, kw)

        if self.logger:
            self.logger.info(cmd)

        # capture both streams unless the caller redirected them
        kw.setdefault('stdout', subprocess.PIPE)
        kw.setdefault('stderr', subprocess.PIPE)

        if Logs.verbose and not use_shell and not Utils.check_exe(cmd[0]):
            raise Errors.WafError('Program %s not found!' % cmd[0])

        # split out the arguments meant for wait/communicate
        cargs = {}
        if 'timeout' in kw:
            if sys.hexversion >= 0x3030000:
                cargs['timeout'] = kw['timeout']
                if 'start_new_session' not in kw:
                    kw['start_new_session'] = True
            del kw['timeout']
        if 'input' in kw:
            if kw['input']:
                cargs['input'] = kw['input']
                kw['stdin'] = subprocess.PIPE
            del kw['input']

        # Popen wants a plain path, not a waf Node
        if 'cwd' in kw and not isinstance(kw['cwd'], str):
            kw['cwd'] = kw['cwd'].abspath()

        try:
            ret, out, err = Utils.run_process(cmd, kw, cargs)
        except Exception as exc:
            raise Errors.WafError('Execution failure: %s' % str(exc), ex=exc)

        def _emit(text, console_stream, is_err):
            # decode captured bytes, then route to the logger or the console
            if not isinstance(text, str):
                text = text.decode(sys.stdout.encoding or 'latin-1',
                                   errors='replace')
            if self.logger:
                if is_err:
                    self.logger.error('err: %s' % text)
                else:
                    self.logger.debug('out: %s', text)
            else:
                Logs.info(text, extra={'stream': console_stream, 'c1': ''})

        if out:
            _emit(out, sys.stdout, False)
        if err:
            _emit(err, sys.stderr, True)

        return ret
Beispiel #26
0
def apply_vnum(self):
    """
    Enforce version numbering on shared libraries. The valid version numbers
    must have at most two dots::

        def build(bld):
            bld.shlib(source='a.c', target='foo', vnum='14.15.16')

    In this example, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``,
    and the following symbolic links are created:

    * ``libfoo.so    → libfoo.so.14.15.16``
    * ``libfoo.so.14 → libfoo.so.14.15.16``
    """
    # only meaningful for ELF/Mach-O shared objects on POSIX platforms
    if not getattr(self, 'vnum',
                   '') or os.name != 'posix' or self.env.DEST_BINFMT not in (
                       'elf', 'mac-o'):
        return

    link = self.link_task
    if not re_vnum.match(self.vnum):
        raise Errors.WafError('Invalid version %r for %r' % (self.vnum, self))
    nums = self.vnum.split('.')
    node = link.outputs[0]

    # name3 carries the full version string, name2 only the major number
    libname = node.name
    if libname.endswith('.dylib'):
        name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
        name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
    else:
        name3 = libname + '.' + self.vnum
        name2 = libname + '.' + nums[0]

    # add the so name for the ld linker - to disable, just unset env.SONAME_ST
    if self.env.SONAME_ST:
        v = self.env.SONAME_ST % name2
        self.env.append_value('LINKFLAGS', v.split())

    # the following task is just to enable execution from the build dir :-/

    if self.env.DEST_OS != 'openbsd':
        self.create_task('vnum', node, [
            node.parent.find_or_declare(name2),
            node.parent.find_or_declare(name3)
        ])

    if getattr(self, 'install_task', None):
        # the plain install task is superseded by the versioned copies below
        self.install_task.hasrun = Task.SKIP_ME
        bld = self.bld
        path = self.install_task.dest
        if self.env.DEST_OS == 'openbsd':
            # openbsd: install the unversioned library only, no symlinks
            libname = self.link_task.outputs[0].name
            t1 = bld.install_as('%s%s%s' % (path, os.sep, libname),
                                node,
                                env=self.env,
                                chmod=self.link_task.chmod)
            self.vnum_install_task = (t1, )
        else:
            # install the fully-versioned file, then link name2/libname to it
            t1 = bld.install_as(path + os.sep + name3,
                                node,
                                env=self.env,
                                chmod=self.link_task.chmod)
            t2 = bld.symlink_as(path + os.sep + name2, name3)
            t3 = bld.symlink_as(path + os.sep + libname, name3)
            self.vnum_install_task = (t1, t2, t3)

    if '-dynamiclib' in self.env['LINKFLAGS']:
        # macOS: embed the install path via -install_name
        # this requires after(propagate_uselib_vars)
        try:
            inst_to = self.install_path
        except AttributeError:
            inst_to = self.link_task.__class__.inst_to
        if inst_to:
            p = Utils.subst_vars(inst_to, self.env)
            path = os.path.join(p, self.link_task.outputs[0].name)
            self.env.append_value('LINKFLAGS', ['-install_name', path])
Beispiel #27
0
def multicheck(self, *k, **kw):
    """
    Runs configuration tests in parallel; results are printed sequentially at
    the end of the build, but each test must provide its own msg value to
    display a line::

        def test_build(ctx):
            ctx.in_msg = True # suppress console outputs
            ctx.check_large_file(mandatory=False)

        conf.multicheck(
            {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
            {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
            {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
            {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
            msg       = 'Checking for headers in parallel',
            mandatory = True, # mandatory tests raise an error at the end
            run_all_tests = True, # try running all tests
        )

    The configuration tests may modify the values in conf.env in any order, and
    the define values can affect configuration tests being executed. It is
    hence recommended to provide `uselib_store` values with
    `global_define=False` to prevent such issues.

    :param k: one dict of check arguments per test; a dict may carry an 'id'
        key plus 'before_tests'/'after_tests' lists of ids for ordering
    :param kw: 'msg', 'errmsg', 'fatalmsg', 'mandatory', 'run_all_tests'
    :raises: ValueError if a before_tests/after_tests id does not exist
    :raises: :py:class:`waflib.Errors.WafError` on internal failure, or via
        :py:meth:`fatal` when a mandatory test fails
    """
    self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)),
                   **kw)

    # Force a copy so that threads append to the same list at least
    # no order is guaranteed, but the values should not disappear at least
    for var in ('DEFINES', DEFKEYS):
        self.env.append_value(var, [])
    self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}

    # minimal build-context stand-in for executing cfgtask objects
    class par(object):
        def __init__(self):
            self.keep = False
            self.task_sigs = {}
            self.progress_bar = 0

        def total(self):
            return len(tasks)

        def to_log(self, *k, **kw):
            return

    bld = par()
    bld.keep = kw.get('run_all_tests', True)
    bld.imp_sigs = {}
    tasks = []

    id_to_task = {}
    for dct in k:
        x = Task.classes['cfgtask'](bld=bld, env=None)
        tasks.append(x)
        x.args = dct  # fix: was assigned twice in the original
        x.bld = bld
        x.conf = self

        # bind a logger that will keep the info in memory
        x.logger = Logs.make_mem_logger(str(id(x)), self.logger)

        if 'id' in dct:
            id_to_task[dct['id']] = x

    # second pass to set dependencies with after_test/before_test
    for x in tasks:
        for key in Utils.to_list(x.args.get('before_tests', [])):
            # fix: direct indexing raised KeyError before the intended
            # ValueError below could ever trigger; .get() makes it reachable
            tsk = id_to_task.get(key)
            if not tsk:
                raise ValueError('No test named %r' % key)
            tsk.run_after.add(x)
        for key in Utils.to_list(x.args.get('after_tests', [])):
            tsk = id_to_task.get(key)
            if not tsk:
                raise ValueError('No test named %r' % key)
            x.run_after.add(tsk)

    def it():
        # feed all tasks once, then signal exhaustion to the scheduler
        yield tasks
        while 1:
            yield []

    bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
    bld.multicheck_lock = Utils.threading.Lock()
    p.biter = it()

    self.end_msg('started')
    p.start()

    # flush the logs in order into the config.log
    for x in tasks:
        x.logger.memhandler.flush()

    self.start_msg('-> processing test results')
    if p.error:
        for x in p.error:
            if getattr(x, 'err_msg', None):
                self.to_log(x.err_msg)
                self.end_msg('fail', color='RED')
                raise Errors.WafError(
                    'There is an error in the library, read config.log for more information'
                )

    failure_count = 0
    for x in tasks:
        if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
            failure_count += 1

    if failure_count:
        self.end_msg(kw.get('errmsg', '%s test failed' % failure_count),
                     color='YELLOW',
                     **kw)
    else:
        self.end_msg('all ok', **kw)

    # mandatory failures abort the configuration
    for x in tasks:
        if x.hasrun != Task.SUCCESS:
            if x.args.get('mandatory', True):
                self.fatal(
                    kw.get('fatalmsg') or
                    'One of the tests has failed, read config.log for more information'
                )
 def post(self):
     """
     Runs the task generator: collects the method names bound to the
     declared features, orders them according to the precedence table,
     then calls each method in turn.

     :returns: False when already posted, True otherwise
     :raises: :py:class:`waflib.Errors.WafError` on a precedence cycle or
         on an unknown method name
     """
     # idempotent: only run once per generator
     if getattr(self, 'posted', None):
         return False
     self.posted = True
     # candidate method names: explicit meths plus the global '*' feature
     keys = set(self.meths)
     keys.update(feats['*'])
     self.features = Utils.to_list(self.features)
     for x in self.features:
         st = feats[x]
         if st:
             keys.update(st)
         elif not x in Task.classes:
             # a feature with no bound methods and no task class of that
             # name is almost certainly a typo
             Logs.warn(
                 'feature %r does not exist - bind at least one method to it?',
                 x)
     # restrict the precedence table to the methods actually selected;
     # prec[m] lists the methods that m must run after
     prec = {}
     prec_tbl = self.prec
     for x in prec_tbl:
         if x in keys:
             prec[x] = prec_tbl[x]
     # topological sort over the precedence constraints: seed with the
     # methods that appear in no constraint list (sorted for determinism)
     tmp = []
     for a in keys:
         for x in prec.values():
             if a in x:
                 break
         else:
             tmp.append(a)
     tmp.sort()
     out = []
     while tmp:
         e = tmp.pop()
         if e in keys:
             out.append(e)
         try:
             nlst = prec[e]
         except KeyError:
             pass
         else:
             del prec[e]
             # release the methods that no longer appear in any
             # remaining constraint list
             for x in nlst:
                 for y in prec:
                     if x in prec[y]:
                         break
                 else:
                     tmp.append(x)
     # anything left in prec is part of a dependency cycle
     if prec:
         buf = ['Cycle detected in the method execution:']
         for k, v in prec.items():
             buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
         raise Errors.WafError('\n'.join(buf))
     # the traversal built the list in reverse execution order
     out.reverse()
     self.meths = out
     Logs.debug('task_gen: posting %s %d', self, id(self))
     for x in out:
         try:
             v = getattr(self, x)
         except AttributeError:
             raise Errors.WafError(
                 '%r is not a valid task generator method' % x)
         Logs.debug('task_gen: -> %s (%d)', x, id(self))
         v()
     Logs.debug('task_gen: posted %s', self.name)
     return True
Beispiel #29
0
def process_subst(self):
    """
    Substitution processing: for each source/target pair, create a 'subst'
    task that replaces *@VAR@* macros in the source file with attributes of
    this task generator, writing the result to the target::

        def build(bld):
            bld(
                features='subst',
                source='foo.c.in',
                target='foo.c',
                install_path='${LIBDIR}/pkgconfig',
                VAR = 'val'
            )

    This method overrides the processing by
    :py:meth:`waflib.TaskGen.process_source`.
    """
    sources = Utils.to_list(getattr(self, 'source', []))
    if isinstance(sources, Node.Node):
        sources = [sources]
    targets = Utils.to_list(getattr(self, 'target', []))
    if isinstance(targets, Node.Node):
        targets = [targets]
    if len(sources) != len(targets):
        raise Errors.WafError('invalid number of source/target for %r' % self)

    for in_item, out_item in zip(sources, targets):
        if not in_item or not out_item:
            raise Errors.WafError('null source or target for %r' % self)

        in_node = out_node = None
        same_name = (isinstance(in_item, str) and isinstance(out_item, str)
                     and in_item == out_item)
        if same_name:
            # identical names: read from the source dir, write to the build dir
            in_node = self.path.find_node(in_item)
            out_node = self.path.get_bld().make_node(out_item)
            if not os.path.isfile(out_node.abspath()):
                out_node.parent.mkdir()
        else:
            # resolve strings to nodes; pass existing nodes straight through
            if isinstance(in_item, str):
                in_node = self.path.find_resource(in_item)
            elif isinstance(in_item, Node.Node):
                in_node = in_item
            if isinstance(out_item, str):
                out_node = self.path.find_or_declare(out_item)
            elif isinstance(out_item, Node.Node):
                out_node = out_item

        if not in_node:
            raise Errors.WafError('could not find %r for %r' % (in_item, self))

        tsk = self.create_task('subst', in_node, out_node)
        # forward ordering attributes from the generator to the task
        for attr in ('after', 'before', 'ext_in', 'ext_out'):
            value = getattr(self, attr, None)
            if value:
                setattr(tsk, attr, value)

        # paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
        for ext in HEADER_EXTS:
            if out_node.name.endswith(ext):
                tsk.ext_in = tsk.ext_in + ['.h']
                break

        inst_to = getattr(self, 'install_path', None)
        if inst_to:
            self.install_task = self.add_install_files(
                install_to=inst_to,
                install_from=out_node,
                chmod=getattr(self, 'chmod', Utils.O644))

    # prevent the default source processing from seeing these files again
    self.source = []
Beispiel #30
0
def process_settings(self):
	"""
	Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
	same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.

	During installation the schemas are copied to GSETTINGSSCHEMADIR and the
	schema cache is refreshed once per directory via a post-build callback.
	"""
	enums_tgt_node = []
	install_files = []

	settings_schema_files = getattr(self, 'settings_schema_files', [])
	if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
		raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")

	# 1. process gsettings_enum_files (generate .enums.xml)
	#
	if hasattr(self, 'settings_enum_files'):
		enums_task = self.create_task('glib_mkenums')

		source_list = self.settings_enum_files
		source_list = [self.path.find_resource(k) for k in source_list]
		enums_task.set_inputs(source_list)
		enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]

		# the generated enums file is named after the schema namespace
		target = self.settings_enum_namespace + '.enums.xml'
		tgt_node = self.path.find_or_declare(target)
		enums_task.set_outputs(tgt_node)
		enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
		enums_tgt_node = [tgt_node]

		install_files.append(tgt_node)

		# glib-mkenums template producing a <schemalist> enum document
		options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
		enums_task.env.GLIB_MKENUMS_OPTIONS = options

	# 2. process gsettings_schema_files (validate .gschema.xml files)
	#
	for schema in settings_schema_files:
		schema_task = self.create_task ('glib_validate_schema')

		schema_node = self.path.find_resource(schema)
		if not schema_node:
			raise Errors.WafError("Cannot find the schema file %r" % schema)
		install_files.append(schema_node)
		# validation also needs the generated enums file, if any
		source_list = enums_tgt_node + [schema_node]

		schema_task.set_inputs (source_list)
		schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]

		# a stamp file marks successful validation for the build system
		target_node = schema_node.change_ext('.xml.valid')
		schema_task.set_outputs (target_node)
		schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()

	# 3. schemas install task
	def compile_schemas_callback(bld):
		# post-build hook: refresh the schema cache of every registered dir
		if not bld.is_install:
			return
		compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
		destdir = Options.options.destdir
		paths = bld._compile_schemas_registered
		if destdir:
			# staged installs: prefix each schema dir with the destdir root
			paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
		for path in paths:
			Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
			if self.bld.exec_command(compile_schemas + [path]):
				Logs.warn('Could not update GSettings schema cache %r' % path)

	if self.bld.is_install:
		schemadir = self.env.GSETTINGSSCHEMADIR
		if not schemadir:
			raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')

		if install_files:
			self.add_install_files(install_to=schemadir, install_from=install_files)
			# register the callback once per build; the shared set ensures
			# each schema directory's cache is refreshed only one time
			registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
			if not registered_schemas:
				registered_schemas = self.bld._compile_schemas_registered = set()
				self.bld.add_post_fun(compile_schemas_callback)
			registered_schemas.add(schemadir)