def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install=True):
    '''install some headers

    header_path may either be a string that is added to the INCLUDEDIR,
    or it can be a dictionary of wildcard patterns which map to destination
    directories relative to INCLUDEDIR
    '''
    bld.SET_BUILD_GROUP('final')

    if not bld.env.build_public_headers:
        # in this case no header munging needed. Used for tdb, talloc etc
        public_headers_simple(bld, public_headers, header_path=header_path,
                              public_headers_install=public_headers_install)
        return

    # create the public header in the given path
    # in the build tree
    for h in TO_LIST(public_headers):
        inst_path = header_install_path(h, header_path)
        # an entry may be given as 'source_name:installed_name'
        if h.find(':') != -1:
            s = h.split(":")
            h_name =  s[0]
            inst_name = s[1]
        else:
            h_name =  h
            inst_name = os.path.basename(h)
        curdir = bld.path.abspath()
        # relpath1: from the current directory up to the source-tree root
        relpath1 = os_path_relpath(bld.srcnode.abspath(), curdir)
        # relpath2: from the source-tree root down to the current directory
        relpath2 = os_path_relpath(curdir, bld.srcnode.abspath())
        targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))
        if not os.path.exists(os.path.join(curdir, targetdir)):
            raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
        target = os.path.join(targetdir, inst_name)

        # the source path of the header, relative to the top of the source tree
        src_path = os.path.normpath(os.path.join(relpath2, h_name))

        # the install path of the header, relative to the public include directory
        target_path = os.path.normpath(os.path.join(inst_path, inst_name))

        # record the mapping so create_public_header can rewrite #include lines
        header_map[src_path] = target_path

        t = bld.SAMBA_GENERATOR('HEADER_%s/%s/%s' % (relpath2, inst_path, inst_name),
                                group='headers',
                                rule=create_public_header,
                                source=h_name,
                                target=target)
        t.env.RELPATH = relpath2
        t.env.TOPDIR  = bld.srcnode.abspath()
        # accumulate the full list of public headers on the build environment
        if not bld.env.public_headers_list:
            bld.env.public_headers_list = []
        bld.env.public_headers_list.append(os.path.join(inst_path, inst_name))
        if public_headers_install:
            bld.INSTALL_FILES('${INCLUDEDIR}',
                              target,
                              destname=os.path.join(inst_path, inst_name), flat=True)
# Example #2
def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install=True):
    '''install some headers

    header_path may either be a string that is added to the INCLUDEDIR,
    or it can be a dictionary of wildcard patterns which map to destination
    directories relative to INCLUDEDIR
    '''
    bld.SET_BUILD_GROUP('final')

    if not bld.env.build_public_headers:
        # in this case no header munging needed. Used for tdb, talloc etc
        public_headers_simple(bld, public_headers, header_path=header_path,
                              public_headers_install=public_headers_install)
        return

    # create the public header in the given path
    # in the build tree
    for h in TO_LIST(public_headers):
        inst_path = header_install_path(h, header_path)
        # an entry may be given as 'source_name:installed_name'
        if h.find(':') != -1:
            s = h.split(":")
            h_name =  s[0]
            inst_name = s[1]
        else:
            h_name =  h
            inst_name = os.path.basename(h)
        # relpath1: current dir -> source-tree root; relpath2: the reverse
        relpath1 = os_path_relpath(bld.srcnode.abspath(), bld.curdir)
        relpath2 = os_path_relpath(bld.curdir, bld.srcnode.abspath())
        targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))
        if not os.path.exists(os.path.join(bld.curdir, targetdir)):
            raise Utils.WafError("missing source directory {0!s} for public header {1!s}".format(targetdir, inst_name))
        target = os.path.join(targetdir, inst_name)

        # the source path of the header, relative to the top of the source tree
        src_path = os.path.normpath(os.path.join(relpath2, h_name))

        # the install path of the header, relative to the public include directory
        target_path = os.path.normpath(os.path.join(inst_path, inst_name))

        # record the mapping so create_public_header can rewrite #include lines
        header_map[src_path] = target_path

        t = bld.SAMBA_GENERATOR('HEADER_{0!s}/{1!s}/{2!s}'.format(relpath2, inst_path, inst_name),
                                group='headers',
                                rule=create_public_header,
                                source=h_name,
                                target=target)
        t.env.RELPATH = relpath2
        t.env.TOPDIR  = bld.srcnode.abspath()
        # accumulate the full list of public headers on the build environment
        if not bld.env.public_headers_list:
            bld.env.public_headers_list = []
        bld.env.public_headers_list.append(os.path.join(inst_path, inst_name))
        if public_headers_install:
            bld.INSTALL_FILES('${INCLUDEDIR}',
                              target,
                              destname=os.path.join(inst_path, inst_name), flat=True)
# Example #3
def run_named_build_task(cmd):
    '''run a named build task, matching the cmd name using fnmatch
    wildcards against inputs and outputs of all build tasks'''
    bld = fake_build_environment(info=False)
    matched = False
    cwd_node = bld.root.find_dir(os.getcwd())
    top_node = bld.root.find_dir(bld.srcnode.abspath())

    cmd = os.path.normpath(cmd)

    # cope with builds of bin/*/* (symlinks pointing into the build tree)
    if os.path.islink(cmd):
        cmd = os_path_relpath(os.readlink(cmd), os.getcwd())

    # strip the default build-directory prefix if present
    if cmd.startswith("bin/default/"):
        cmd = cmd[12:]

    # compare the pattern against every task's inputs and outputs, using
    # both cwd-relative and top-relative spellings of each node path
    for group in bld.task_manager.groups:
        for attr in ('outputs', 'inputs'):
            for tsk in group.tasks:
                for node in getattr(tsk, attr, []):
                    rel_paths = (node.relpath_gen(cwd_node),
                                 node.relpath_gen(top_node))
                    if any(fnmatch.fnmatch(rp, cmd) for rp in rel_paths):
                        tsk.position = [0, 0]
                        print(tsk.display())
                        run_task(tsk, node)
                        matched = True

    if not matched:
        raise Utils.WafError("Unable to find build target matching %s" % cmd)
# Example #4
def run_named_build_task(cmd):
    '''run a named build task, matching the cmd name using fnmatch
    wildcards against inputs and outputs of all build tasks'''
    bld = fake_build_environment(info=False)
    found_any = False
    cwd_node = bld.root.find_dir(os.getcwd())
    top_node = bld.root.find_dir(bld.srcnode.abspath())

    cmd = os.path.normpath(cmd)

    # cope with builds of bin/*/* (symlinks pointing into the build tree)
    if os.path.islink(cmd):
        cmd = os_path_relpath(os.readlink(cmd), os.getcwd())

    # drop the standard build prefix so patterns match tree-relative paths
    if cmd.startswith("bin/default/"):
        cmd = cmd[12:]

    for grp in bld.task_manager.groups:
        for attr_name in ('outputs', 'inputs'):
            for tsk in grp.tasks:
                nodes = getattr(tsk, attr_name, [])
                for nd in nodes:
                    rel_cwd = nd.relpath_gen(cwd_node)
                    rel_top = nd.relpath_gen(top_node)
                    hit = (fnmatch.fnmatch(rel_cwd, cmd)
                           or fnmatch.fnmatch(rel_top, cmd))
                    if hit:
                        tsk.position = [0, 0]
                        print(tsk.display())
                        run_task(tsk, nd)
                        found_any = True

    if not found_any:
        raise Errors.WafError("Unable to find build target matching %s" % cmd)
# Example #5
def s3_fix_kwargs(bld, kwargs):
    '''fix the build arguments for s3 build rules to include the
    necessary includes, subdir and cflags options '''
    s3dir = os.path.join(bld.env.srcdir, 'source3')
    s3reldir = os_path_relpath(s3dir, bld.curdir)

    # the extra_includes list is relative to the source3 directory
    extra_includes = ['.', 'include', 'lib', '../lib/tdb_compat']
    # local heimdal paths only included when USING_SYSTEM_KRB5 is not set
    if not bld.CONFIG_SET("USING_SYSTEM_KRB5"):
        extra_includes += [
            '../source4/heimdal/lib/com_err', '../source4/heimdal/lib/krb5',
            '../source4/heimdal/lib/gssapi', '../source4/heimdal_build',
            '../bin/default/source4/heimdal/lib/asn1'
        ]

    # for each bundled library: use the system copy's CPPPATH when
    # configured, otherwise fall back to the in-tree header directory
    bundled = (
        ('USING_SYSTEM_TDB', 'tdb', '../lib/tdb/include'),
        ('USING_SYSTEM_TEVENT', 'tevent', '../lib/tevent'),
        ('USING_SYSTEM_TALLOC', 'talloc', '../lib/talloc'),
        ('USING_SYSTEM_POPT', 'popt', '../lib/popt'),
        ('USING_SYSTEM_INIPARSER', 'iniparser', '../lib/iniparser'),
    )
    for flag, lib, intree in bundled:
        if bld.CONFIG_SET(flag):
            # library_flags returns (includes, ldflags, cpppath)
            extra_includes += library_flags(bld, lib)[2]
        else:
            extra_includes += [intree]

    # s3 builds assume that they will have a bunch of extra include paths
    includes = [os.path.join(s3reldir, d) for d in extra_includes]

    # the rule may already have some includes listed
    if 'includes' in kwargs:
        includes += TO_LIST(kwargs['includes'])
    kwargs['includes'] = includes
# Example #6
def s3_fix_kwargs(bld, kwargs):
    '''fix the build arguments for s3 build rules to include the
    necessary includes, subdir and cflags options '''
    s3dir = os.path.join(bld.env.srcdir, 'source3')
    s3reldir = os_path_relpath(s3dir, bld.curdir)

    # paths below are relative to the source3 directory
    extra_includes = ['.', 'include', 'lib', '../lib/tdb_compat']
    # local heimdal paths only included when USING_SYSTEM_KRB5 is not set
    if not bld.CONFIG_SET("USING_SYSTEM_KRB5"):
        extra_includes += ['../source4/heimdal/lib/com_err',
                           '../source4/heimdal/lib/krb5',
                           '../source4/heimdal/lib/gssapi',
                           '../source4/heimdal_build',
                           '../bin/default/source4/heimdal/lib/asn1']

    def add_lib_paths(flag, libname, intree_path):
        # prefer the system library's CPPPATH when configured to use it,
        # otherwise point at the bundled copy
        if bld.CONFIG_SET(flag):
            extra_includes.extend(library_flags(bld, libname)[2])
        else:
            extra_includes.append(intree_path)

    add_lib_paths('USING_SYSTEM_TDB', 'tdb', '../lib/tdb/include')
    add_lib_paths('USING_SYSTEM_TEVENT', 'tevent', '../lib/tevent')
    add_lib_paths('USING_SYSTEM_TALLOC', 'talloc', '../lib/talloc')
    add_lib_paths('USING_SYSTEM_POPT', 'popt', '../lib/popt')
    add_lib_paths('USING_SYSTEM_INIPARSER', 'iniparser', '../lib/iniparser')

    # s3 builds assume that they will have a bunch of extra include paths
    includes = [os.path.join(s3reldir, d) for d in extra_includes]

    # honour any includes the rule already listed
    if 'includes' in kwargs:
        includes += TO_LIST(kwargs['includes'])
    kwargs['includes'] = includes
# Example #7
def check_duplicate_sources(bld, tgt_list):
    '''see if we are compiling the same source file more than once
       without an allow_duplicates attribute

    Exits the build with an error if a target lists the same source
    twice; raises Utils.WafError if a source ends up in more than one
    subsystem of the same top-level target. Returns True otherwise.
    '''
    # NOTE: dropped a stray 'global tstart' declaration and the unused
    # locals 'ret' and 'sources' from the original -- pure dead code.

    debug('deps: checking for duplicate sources')

    # map of target name -> target type (LIBRARY, BINARY, PYTHON, ...)
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # record, for every target, the normalised set of source paths it
    # compiles directly (paths made relative to the build directory)
    for t in tgt_list:
        source_list = TO_LIST(getattr(t, 'source', ''))
        tpath = os.path.normpath(
            os_path_relpath(t.path.abspath(bld.env),
                            t.env.BUILD_DIRECTORY + '/default'))
        obj_sources = set()
        for s in source_list:
            p = os.path.normpath(os.path.join(tpath, s))
            if p in obj_sources:
                Logs.error("ERROR: source %s appears twice in target '%s'" %
                           (p, t.sname))
                sys.exit(1)
            obj_sources.add(p)
        t.samba_source_set = obj_sources

    subsystems = {}

    # build a list of targets that each source file is part of
    for t in tgt_list:
        if targets[t.sname] not in ['LIBRARY', 'BINARY', 'PYTHON']:
            continue
        for obj in t.add_objects:
            t2 = t.bld.get_tgen_by_name(obj)
            source_set = getattr(t2, 'samba_source_set', set())
            for s in source_set:
                subsystems.setdefault(s, {}).setdefault(t.sname, []).append(t2.sname)

    # a source in several top-level targets is only a warning; a source in
    # several subsystems of the same target is fatal
    for s in subsystems:
        if len(subsystems[s]) > 1 and Options.options.SHOW_DUPLICATES:
            Logs.warn("WARNING: source %s is in more than one target: %s" %
                      (s, subsystems[s].keys()))
        for tname in subsystems[s]:
            if len(subsystems[s][tname]) > 1:
                raise Utils.WafError(
                    "ERROR: source %s is in more than one subsystem of target '%s': %s"
                    % (s, tname, subsystems[s][tname]))

    return True
# Example #8
def symbols_dupcheck(task, fail_on_error=False):
    '''check for symbols defined in two different subsystems'''
    bld = task.env.bld
    tgt_list = get_tgt_list(bld)

    # NOTE(review): return value unused; presumably kept for a cache-priming
    # side effect of LOCAL_CACHE -- confirm before removing
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    build_library_dict(bld, tgt_list)
    for tgt in tgt_list:
        if tgt.samba_type != 'BINARY':
            continue
        binpath = tgt.link_task.outputs[0].abspath(bld.env)
        binname = os_path_relpath(binpath, os.getcwd())
        symbols_dupcheck_binary(bld, binname, fail_on_error)
# Example #9
def symbols_dupcheck(task, fail_on_error=False):
    '''check for symbols defined in two different subsystems'''
    bld = task.env.bld
    tgt_list = get_tgt_list(bld)

    # NOTE(review): return value unused; presumably kept for a cache-priming
    # side effect of LOCAL_CACHE -- confirm before removing
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    build_library_dict(bld, tgt_list)
    binaries = [t for t in tgt_list if t.samba_type == 'BINARY']
    for b in binaries:
        out_node = b.link_task.outputs[0]
        rel_bin = os_path_relpath(out_node.abspath(bld.env), os.getcwd())
        symbols_dupcheck_binary(bld, rel_bin, fail_on_error)
def SAMBA_AUTOPROTO(bld, header, source):
    '''rule for samba prototype generation'''
    bld.SET_BUILD_GROUP('prototypes')
    # the target is named by its path relative to the source-tree root
    relpath = os_path_relpath(bld.path.abspath(), bld.srcnode.abspath())
    name = os.path.join(relpath, header)
    SET_TARGET_TYPE(bld, name, 'PROTOTYPE')
    mkproto_rule = ('${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. '
                    '--public=/dev/null --private="${TGT}" ${SRC}')
    tg = bld(name=name,
             source=source,
             target=header,
             update_outputs=True,
             ext_out='.c',
             before='c',
             rule=mkproto_rule)
    tg.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script')
# Example #11
def SAMBA_AUTOPROTO(bld, header, source):
    '''rule for samba prototype generation'''
    bld.SET_BUILD_GROUP('prototypes')
    # the target is named by its path relative to the source-tree root
    relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())
    name = os.path.join(relpath, header)
    SET_TARGET_TYPE(bld, name, 'PROTOTYPE')
    t = bld(
        name = name,
        source = source,
        target = header,
        update_outputs=True,
        ext_out='.c',
        # NOTE(review): 'cc' looks like the older-waf C compile task class
        # name -- confirm against the waf version this file targets
        before ='cc',
        rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}'
        )
    t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script')
# Example #12
def s3_fix_kwargs(bld, kwargs):
    """fix the build arguments for s3 build rules to include the
    necessary includes, subdir and cflags options """
    s3dir = os.path.join(bld.env.srcdir, "source3")
    s3reldir = os_path_relpath(s3dir, bld.curdir)

    # paths in extra_includes are relative to the source3 directory
    extra_includes = [".", "include", "lib", "../lib/tdb_compat"]
    if not bld.CONFIG_SET("USING_SYSTEM_KRB5"):
        # in-tree heimdal header directories
        extra_includes += [
            "../source4/heimdal/lib/com_err",
            "../source4/heimdal/lib/gssapi",
            "../source4/heimdal_build",
        ]

    # pick the in-tree tdb variant that matches the configuration
    if bld.CONFIG_SET("BUILD_TDB2"):
        system_flag, intree = "USING_SYSTEM_TDB2", "../lib/tdb2"
    else:
        system_flag, intree = "USING_SYSTEM_TDB", "../lib/tdb/include"
    if not bld.CONFIG_SET(system_flag):
        extra_includes += [intree]

    # bundled copies of the remaining support libraries
    for flag, path in (("USING_SYSTEM_TEVENT", "../lib/tevent"),
                       ("USING_SYSTEM_TALLOC", "../lib/talloc"),
                       ("USING_SYSTEM_POPT", "../lib/popt")):
        if not bld.CONFIG_SET(flag):
            extra_includes += [path]

    # s3 builds assume that they will have a bunch of extra include paths
    includes = [os.path.join(s3reldir, d) for d in extra_includes]

    # merge with any includes already present on the rule
    if "includes" in kwargs:
        includes += TO_LIST(kwargs["includes"])
    kwargs["includes"] = includes

    # some S3 code assumes that CONFIGFILE is set
    cflags = ['-DCONFIGFILE="%s"' % bld.env["CONFIGFILE"]]
    if "cflags" in kwargs:
        cflags += TO_LIST(kwargs["cflags"])
    kwargs["cflags"] = cflags
# Example #13
def s3_fix_kwargs(bld, kwargs):
    '''fix the build arguments for s3 build rules to include the
    necessary includes, subdir and cflags options '''
    s3dir = os.path.join(bld.env.srcdir, 'source3')
    # path from the current build directory to source3/
    s3reldir = os_path_relpath(s3dir, bld.curdir)

    # the extra_includes list is relative to the source3 directory
    extra_includes = ['.', 'include', 'lib', '../lib/tdb_compat']
    # in-tree heimdal headers, unless a system krb5 is in use
    if not bld.CONFIG_SET("USING_SYSTEM_KRB5"):
        extra_includes += [
            '../source4/heimdal/lib/com_err', '../source4/heimdal/lib/gssapi',
            '../source4/heimdal_build'
        ]

    # pick in-tree tdb or tdb2 headers unless the system copy is used
    if bld.CONFIG_SET('BUILD_TDB2'):
        if not bld.CONFIG_SET('USING_SYSTEM_TDB2'):
            extra_includes += ['../lib/tdb2']
    else:
        if not bld.CONFIG_SET('USING_SYSTEM_TDB'):
            extra_includes += ['../lib/tdb/include']

    if not bld.CONFIG_SET('USING_SYSTEM_TEVENT'):
        extra_includes += ['../lib/tevent']

    if not bld.CONFIG_SET('USING_SYSTEM_TALLOC'):
        extra_includes += ['../lib/talloc']

    if not bld.CONFIG_SET('USING_SYSTEM_POPT'):
        extra_includes += ['../lib/popt']

    # s3 builds assume that they will have a bunch of extra include paths
    includes = []
    for d in extra_includes:
        includes += [os.path.join(s3reldir, d)]

    # the rule may already have some includes listed
    if 'includes' in kwargs:
        includes += TO_LIST(kwargs['includes'])
    kwargs['includes'] = includes

    # some S3 code assumes that CONFIGFILE is set
    cflags = ['-DCONFIGFILE="%s"' % bld.env['CONFIGFILE']]
    if 'cflags' in kwargs:
        cflags += TO_LIST(kwargs['cflags'])
    kwargs['cflags'] = cflags
def vcs_dir_contents(path):
    """Return the versioned files under a path.

    :return: List of paths relative to path
    """
    # walk up from 'path' looking for the enclosing git repository
    candidate = path
    while candidate != "/":
        git_dir = os.path.join(candidate, ".git")
        if os.path.isdir(git_dir):
            cmd = [ 'git', 'ls-files', '--full-name',
                    os_path_relpath(path, candidate) ]
            env = dict(os.environ)
            # point git at the repository we found rather than relying on cwd
            env["GIT_DIR"] = git_dir
            return Utils.cmd_output(cmd, cwd=None, env=env).split()
        candidate = os.path.dirname(candidate)
    raise Exception("unsupported or no vcs for %s" % path)
# Example #15
def vcs_dir_contents(path):
    """Return the versioned files under a path.

    :return: List of paths relative to path
    """
    # walk up from 'path' looking for the enclosing git repository
    repo = path
    while repo != "/":
        if os.path.isdir(os.path.join(repo, ".git")):
            # list files under 'path' using repo-relative (--full-name) paths
            ls_files_cmd = [ 'git', 'ls-files', '--full-name',
                             os_path_relpath(path, repo) ]
            cwd = None
            env = dict(os.environ)
            # point git at the repository we found rather than relying on cwd
            env["GIT_DIR"] = os.path.join(repo, ".git")
            break
        repo = os.path.dirname(repo)
    if repo == "/":
        raise Exception("unsupported or no vcs for %s" % path)
    return Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env).split()
# Example #16
def s3_fix_kwargs(bld, kwargs):
    '''fix the build arguments for s3 build rules to include the
    necessary includes, subdir and cflags options '''
    s3dir = os.path.join(bld.env.srcdir, 'source3')
    # path from the current build directory to source3/
    s3reldir = os_path_relpath(s3dir, bld.curdir)

    # the extra_includes list is relative to the source3 directory
    extra_includes = [ '.', 'include', 'lib', '../lib/tdb_compat' ]
    # in-tree heimdal headers when configured to use the bundled heimdal
    if bld.env.use_intree_heimdal:
        extra_includes += [ '../source4/heimdal/lib/com_err',
                            '../source4/heimdal/lib/gssapi',
                            '../source4/heimdal_build' ]

    # pick in-tree tdb or tdb2 headers unless the system copy is used
    if bld.CONFIG_SET('BUILD_TDB2'):
        if not bld.CONFIG_SET('USING_SYSTEM_TDB2'):
            extra_includes += [ '../lib/tdb2' ]
    else:
        if not bld.CONFIG_SET('USING_SYSTEM_TDB'):
            extra_includes += [ '../lib/tdb/include' ]

    if not bld.CONFIG_SET('USING_SYSTEM_TEVENT'):
        extra_includes += [ '../lib/tevent' ]

    if not bld.CONFIG_SET('USING_SYSTEM_TALLOC'):
        extra_includes += [ '../lib/talloc' ]

    if not bld.CONFIG_SET('USING_SYSTEM_POPT'):
        extra_includes += [ '../lib/popt' ]

    # s3 builds assume that they will have a bunch of extra include paths
    includes = []
    for d in extra_includes:
        includes += [ os.path.join(s3reldir, d) ]

    # the rule may already have some includes listed
    if 'includes' in kwargs:
        includes += TO_LIST(kwargs['includes'])
    kwargs['includes'] = includes

    # some S3 code assumes that CONFIGFILE is set
    cflags = ['-DCONFIGFILE="%s"' % bld.env['CONFIGFILE']]
    if 'cflags' in kwargs:
        cflags += TO_LIST(kwargs['cflags'])
    kwargs['cflags'] = cflags
# Example #17
def check_duplicate_sources(bld, tgt_list):
    '''see if we are compiling the same source file more than once'''

    debug('deps: checking for duplicate sources')
    # map of target name -> target type (LIBRARY, BINARY, PYTHON, ...)
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # record, per target, the normalised set of sources it compiles directly
    for t in tgt_list:
        source_list = TO_LIST(getattr(t, 'source', ''))
        tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default'))
        obj_sources = set()
        for s in source_list:
            p = os.path.normpath(os.path.join(tpath, s))
            if p in obj_sources:
                Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))
                sys.exit(1)
            obj_sources.add(p)
        t.samba_source_set = obj_sources

    subsystems = {}

    # build a list of targets that each source file is part of
    for t in tgt_list:
        if not targets[t.sname] in [ 'LIBRARY', 'BINARY', 'PYTHON' ]:
            continue
        for obj in t.add_objects:
            t2 = t.bld.get_tgen_by_name(obj)
            source_set = getattr(t2, 'samba_source_set', set())
            for s in source_set:
                if not s in subsystems:
                    subsystems[s] = {}
                if not t.sname in subsystems[s]:
                    subsystems[s][t.sname] = []
                subsystems[s][t.sname].append(t2.sname)

    # a source in several top-level targets is only a warning; a source in
    # several subsystems of the same target is fatal
    for s in subsystems:
        if len(subsystems[s]) > 1 and Options.options.SHOW_DUPLICATES:
            Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
        for tname in subsystems[s]:
            if len(subsystems[s][tname]) > 1:
                raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))

    return True
def create_public_header(task):
    '''create a public header from a private one, output within the build tree

    Rewrites #include lines so they refer to the installed public header
    names recorded in the global header_map; all other lines are copied
    through unchanged. Raises Utils.WafError if an include cannot be
    resolved and broken public headers are not allowed.
    '''
    src = task.inputs[0].abspath(task.env)
    tgt = task.outputs[0].bldpath(task.env)

    if os.path.exists(tgt):
        os.unlink(tgt)

    relsrc = os_path_relpath(src, task.env.TOPDIR)

    # candidate prefixes for resolving an include against header_map;
    # EXTRA_INCLUDES entries starting with '#' are top-of-tree relative
    search_paths = ['', task.env.RELPATH]
    for i in task.env.EXTRA_INCLUDES:
        if i.startswith('#'):
            search_paths.append(i[1:])

    linenumber = 0
    # context managers so neither file handle leaks when we bail out with
    # an error part way through (the original leaked both on the raise path)
    with open(src, mode='r') as infile, open(tgt, mode='w') as outfile:
        for line in infile:
            linenumber += 1

            # allow some straight substitutions
            if task.env.public_headers_replace and line.strip(
            ) in task.env.public_headers_replace:
                outfile.write(task.env.public_headers_replace[line.strip()] + '\n')
                continue

            # see if its an include line
            m = re_header.match(line)
            if m is None:
                outfile.write(line)
                continue

            # its an include, get the header path
            hpath = m.group(1)
            if hpath.startswith("bin/default/"):
                hpath = hpath[12:]

            # some are always allowed
            if task.env.public_headers_skip and hpath in task.env.public_headers_skip:
                outfile.write(line)
                continue

            # work out the header this refers to
            found = False
            for s in search_paths:
                p = os.path.normpath(os.path.join(s, hpath))
                if p in header_map:
                    outfile.write("#include <%s>\n" % header_map[p])
                    found = True
                    break
            if found:
                continue

            if task.env.public_headers_allow_broken:
                Logs.warn("Broken public header include '%s' in '%s'" %
                          (hpath, relsrc))
                outfile.write(line)
                continue

            # try to be nice to the developer by suggesting an alternative
            suggested = find_suggested_header(hpath)
            # close the partial output before unlinking it
            outfile.close()
            os.unlink(tgt)
            sys.stderr.write(
                "%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n"
                %
                (os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
            raise Utils.WafError(
                "Unable to resolve header path '%s' in public header '%s' in directory %s"
                % (hpath, relsrc, task.env.RELPATH))
# Example #19
def build_includes(self):
    '''This builds the right set of includes for a target.

    One tricky part of this is that the includes= attribute for a
    target needs to use paths which are relative to that targets
    declaration directory (which we can get at via t.path).

    The way this works is the includes list gets added as
    samba_includes in the main build task declaration. Then this
    function runs after all of the tasks are declared, and it
    processes the samba_includes attribute to produce a includes=
    attribute
    '''

    if getattr(self, 'samba_includes', None) is None:
        return

    bld = self.bld

    # include dependencies contributed by other targets
    inc_deps = includes_objects(bld, self, set(), {})

    local_inc = getattr(self, 'local_include', True)
    local_first = getattr(self, 'local_include_first', True)

    includes = []

    # maybe add local includes (before everything else)
    if local_inc and local_first:
        includes.append('.')

    includes.extend(self.samba_includes_extended)

    if 'EXTRA_INCLUDES' in bld.env and getattr(self, 'global_include', True):
        includes.extend(bld.env['EXTRA_INCLUDES'])

    # '#' is waf notation for the top of the source tree
    includes.append('#')

    # gather each dependency's include dirs as absolute paths,
    # de-duplicated while preserving first-seen order
    seen = set()
    inc_abs = []
    for dep in inc_deps:
        t = bld.get_tgen_by_name(dep)
        bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (dep, self.sname))
        inclist = getattr(t, 'samba_includes_extended', [])[:]
        if getattr(t, 'local_include', True):
            inclist.append('.')
        if not inclist:
            continue
        tpath = t.samba_abspath
        for inc in inclist:
            npath = tpath + '/' + inc
            if npath not in seen:
                seen.add(npath)
                inc_abs.append(npath)

    # convert the absolute paths to be relative to this target's directory
    mypath = self.path.abspath(bld.env)
    includes.extend(os_path_relpath(inc, mypath) for inc in inc_abs)

    if local_inc and not local_first:
        includes.append('.')

    # now transform the includes list to be relative to the top directory
    # which is represented by '#' in waf. This allows waf to cache the
    # includes lists more efficiently
    includes_top = []
    for entry in includes:
        if entry[0] == '#':
            # already top based
            includes_top.append(entry)
        else:
            absinc = os.path.join(self.path.abspath(), entry)
            relinc = os_path_relpath(absinc, self.bld.srcnode.abspath())
            includes_top.append('#' + relinc)

    self.includes = unique_list(includes_top)
    debug('deps: includes for target %s: includes=%s',
          self.sname, self.includes)
# Example #20
def build_includes(self):
    '''This builds the right set of includes for a target.

    One tricky part of this is that the includes= attribute for a
    target needs to use paths which are relative to that targets
    declaration directory (which we can get at via t.path).

    The way this works is the includes list gets added as
    samba_includes in the main build task declaration. Then this
    function runs after all of the tasks are declared, and it
    processes the samba_includes attribute to produce a includes=
    attribute
    '''

    if getattr(self, 'samba_includes', None) is None:
        return

    bld = self.bld

    # include dependencies contributed by other targets
    inc_deps = includes_objects(bld, self, set(), {})

    includes = []

    # maybe add local includes
    if getattr(self, 'local_include', True) and getattr(self, 'local_include_first', True):
        includes.append('.')

    includes.extend(self.samba_includes_extended)

    if 'EXTRA_INCLUDES' in bld.env and getattr(self, 'global_include', True):
        includes.extend(bld.env['EXTRA_INCLUDES'])

    # '#' is waf notation for the top of the source tree
    includes.append('#')

    # gather each dependency's include dirs as absolute paths,
    # de-duplicated while preserving first-seen order
    inc_set = set()
    inc_abs = []

    for d in inc_deps:
        t = bld.get_tgen_by_name(d)
        bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (d, self.sname))
        inclist = getattr(t, 'samba_includes_extended', [])[:]
        if getattr(t, 'local_include', True):
            inclist.append('.')
        if inclist == []:
            continue
        tpath = t.samba_abspath
        for inc in inclist:
            npath = tpath + '/' + inc
            if not npath in inc_set:
                inc_abs.append(npath)
                inc_set.add(npath)

    # convert the absolute paths to be relative to this target's directory
    mypath = self.path.abspath(bld.env)
    for inc in inc_abs:
        relpath = os_path_relpath(inc, mypath)
        includes.append(relpath)

    if getattr(self, 'local_include', True) and not getattr(self, 'local_include_first', True):
        includes.append('.')

    # now transform the includes list to be relative to the top directory
    # which is represented by '#' in waf. This allows waf to cache the
    # includes lists more efficiently
    includes_top = []
    for i in includes:
        if i[0] == '#':
            # some are already top based
            includes_top.append(i)
            continue
        absinc = os.path.join(self.path.abspath(), i)
        relinc = os_path_relpath(absinc, self.bld.srcnode.abspath())
        includes_top.append('#' + relinc)

    self.includes = unique_list(includes_top)
    debug('deps: includes for target %s: includes=%s',
          self.sname, self.includes)
# Example #21
def create_public_header(task):
    '''create a public header from a private one, output within the build tree

    Rewrites #include lines so they refer to the installed public header
    names recorded in the global header_map; all other lines are copied
    through unchanged. Raises Utils.WafError if an include cannot be
    resolved and broken public headers are not allowed.
    '''
    src = task.inputs[0].abspath(task.env)
    tgt = task.outputs[0].bldpath(task.env)

    if os.path.exists(tgt):
        os.unlink(tgt)

    relsrc = os_path_relpath(src, task.env.TOPDIR)

    # candidate prefixes for resolving an include against header_map;
    # EXTRA_INCLUDES entries starting with '#' are top-of-tree relative
    search_paths = [ '', task.env.RELPATH ]
    for i in task.env.EXTRA_INCLUDES:
        if i.startswith('#'):
            search_paths.append(i[1:])

    linenumber = 0
    # context managers so neither file handle leaks when we bail out with
    # an error part way through (the original leaked both on the raise path)
    with open(src, mode='r') as infile, open(tgt, mode='w') as outfile:
        for line in infile:
            linenumber += 1

            # allow some straight substitutions
            if task.env.public_headers_replace and line.strip() in task.env.public_headers_replace:
                outfile.write(task.env.public_headers_replace[line.strip()] + '\n')
                continue

            # see if its an include line
            m = re_header.match(line)
            if m is None:
                outfile.write(line)
                continue

            # its an include, get the header path
            hpath = m.group(1)
            if hpath.startswith("bin/default/"):
                hpath = hpath[12:]

            # some are always allowed
            if task.env.public_headers_skip and hpath in task.env.public_headers_skip:
                outfile.write(line)
                continue

            # work out the header this refers to
            found = False
            for s in search_paths:
                p = os.path.normpath(os.path.join(s, hpath))
                if p in header_map:
                    outfile.write("#include <{0!s}>\n".format(header_map[p]))
                    found = True
                    break
            if found:
                continue

            if task.env.public_headers_allow_broken:
                Logs.warn("Broken public header include '{0!s}' in '{1!s}'".format(hpath, relsrc))
                outfile.write(line)
                continue

            # try to be nice to the developer by suggesting an alternative
            suggested = find_suggested_header(hpath)
            # close the partial output before unlinking it
            outfile.close()
            os.unlink(tgt)
            sys.stderr.write("{0!s}:{1:d}:Error: unable to resolve public header {2!s} (maybe try one of {3!s})\n".format(
                os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
            raise Utils.WafError("Unable to resolve header path '{0!s}' in public header '{1!s}' in directory {2!s}".format(
                hpath, relsrc, task.env.RELPATH))