Example #1
def copy_extra_files(tile):
    """Copy all files listed in a copy_files and copy_products section.

    Files listed in copy_files will be copied from the specified location
    in the current component to the specified path under the output
    folder.

    Files listed in copy_products will be looked up with a ProductResolver
    and copied to the specified path in the output folder.  There is
    currently no way to specify what type of product is being resolved.
    The `short_name` given must be unique across all products from this
    component and its direct dependencies.
    """

    env = Environment(tools=[])
    outputbase = os.path.join('build', 'output')

    for src, dest in tile.settings.get('copy_files', {}).items():
        outputfile = os.path.join(outputbase, dest)
        env.Command([outputfile], [src], Copy("$TARGET", "$SOURCE"))

    resolver = ProductResolver.Create()
    for src, dest in tile.settings.get('copy_products', {}).items():
        prod = resolver.find_unique(None, src)
        outputfile = os.path.join(outputbase, dest)

        env.Command([outputfile], [prod.full_path], Copy("$TARGET", "$SOURCE"))
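
For reference, a minimal sketch of the settings this function expects, assuming tile.settings mirrors the module_settings.json layout implied by the docstring (the file names and the product short_name below are hypothetical):

# Hypothetical settings fragment: copy_files maps source paths in the
# component to destinations under build/output; copy_products maps a
# product short_name to a destination under build/output.
example_settings = {
    "copy_files": {
        "doc/notes.txt": "extra/notes.txt",
    },
    "copy_products": {
        "my_firmware_image": "extra/firmware.hex",
    },
}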
Example #2
    def _copy_files(self, target_dir):
        """Copy test harness and file-under-test."""

        builder = Builder(action=recursive_template_action,
                          emitter=recursive_template_emitter)

        _inc_dirs, _sources, headers = unit_test.find_sources('firmware/src')

        # Render the template
        env = Environment(tools=[], BUILDERS={'render': builder})
        env['RECURSIVE_TEMPLATE'] = self.UNIT_TEMPLATE
        template_files = env.render([os.path.join(target_dir, '.timestamp')],
                                    [])

        test_files = []
        for infile in self.files:
            test_file = env.Command(
                [os.path.join(target_dir, os.path.basename(infile))], [infile],
                action=Copy("$TARGET", "$SOURCE"))
            test_files.append(test_file)

        # Copy all headers into the unit test
        for _basename, infile in viewitems(headers):
            test_file = env.Command(
                [os.path.join(target_dir, os.path.basename(infile))], [infile],
                action=Copy("$TARGET", "$SOURCE"))
            test_files.append(test_file)

        all_files = template_files + test_files
        c_files = [str(x) for x in all_files if str(x).endswith('.c')]

        return c_files, all_files
Example #3
 def script(name):
     pc_install = '{0}/lib/pkgconfig/{1}.pc'.format(
         installEnv.installPrefix, name)
     pc_copied = Command(build_dir + '/temp.pc.in',
                         'misc/r-tech1.pc.in',
                         Copy('$TARGET', '$SOURCE'))
     print(installEnv)
     pc_script = installEnv.Substfile(build_dir + '/temp.pc.in',
                                      SUBST_DICT=replacelist)
     installEnv.Depends(pc_script, pc_copied)
     pc_mod = Command(build_dir + '/{0}.pc'.format(name),
                      build_dir + '/temp.pc', Copy('$TARGET', '$SOURCE'))
     installEnv.Depends(pc_mod, pc_script)
     installEnv.InstallAs(pc_install, pc_mod)
     return pc_mod, pc_install
Example #4
def SDist(env, target=None, source=None):
    """
    Call env.Package() with sdist filename inferred from
    env['PACKAGE_METADATA'] etc.
    """
    enscons_defaults(env)

    egg_info = env.Command(egg_info_targets(env), "pyproject.toml", egg_info_builder)
    env.Clean(egg_info, env["EGG_INFO_PATH"])
    env.Alias("egg_info", egg_info)

    pkg_info = env.Command(
        "PKG-INFO", egg_info_targets(env)[0].get_path(), Copy("$TARGET", "$SOURCE")
    )

    src_type = "src_targz"

    # also the root directory name inside the archive
    target_prefix = "-".join((env["PACKAGE_NAME"], env["PACKAGE_VERSION"]))
    if not target:
        target = [os.path.join(env["DIST_BASE"], target_prefix)]

    source = sorted(env.arg2nodes(source, env.fs.Entry))

    sdist = env.PyTar(
        target=target,
        source=source,
        TARPREFIX=target_prefix,
        TARSUFFIX=".tar.gz",
        TARUID=0,
        TARGID=0,
        TARMTIME=SOURCE_EPOCH_TGZ,
    )
    return sdist
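
A hedged usage sketch: assuming this SDist function is attached to the environment with env.AddMethod (the file list and alias name here are assumptions, not taken from the source):

# Minimal sketch only; the source file list is hypothetical.
env.AddMethod(SDist)
sdist = env.SDist(source=["pyproject.toml", "README.md", "SConstruct"])
env.Alias("sdist", sdist)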
Example #5
def generate(env):
    if env.WhereIs("yui-compressor"):
        action = "yui-compressor -o $TARGET $SOURCE"
    else:
        action = Copy("$TARGET", "$SOURCE")
    bld = Builder(action=action, suffix=".min.css", src_suffix=".css")
    env.Append(BUILDERS={"MinifyCSS": bld})
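
With this tool's generate() applied to an environment, the builder is used like any other SCons builder; a minimal sketch (the path is hypothetical):

# Each .css source maps to a .min.css target; if yui-compressor is not on
# PATH the builder silently degrades to a plain copy.
env.MinifyCSS("static/site.css")   # produces static/site.min.css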
Example #6
def create_gsl_cpy_commands(conf, dependencies, copy_folder):
    '''
    Create OS-dependent copy commands. On Darwin: copy all GSL libs, fix
    the install names for dylibs using install_name_tool, and replace the
    lib path with the patched version. On Linux: do nothing.
    '''
    if conf.env["SYSTEM"] == "Darwin" and dependencies["gsl"].lib_path:
        lib_path = dependencies["gsl"].lib_path
        commands = []

        for lib in Glob(os.path.join(lib_path, "*")):
            new_path = os.path.join(copy_folder, os.path.basename(lib.rstr()))
            action = [Copy("$TARGET", "$SOURCE")]
            if ("0.dylib" in lib.rstr()):
                action += [fix_dylib_for_darwin]

            kw = {
                'target': '{0}'.format(new_path),
                'source': '{0}'.format(lib),
                'action': action
            }

            commands.append(kw)

        dependencies["gsl"].lib_path = Dir(copy_folder).abspath
        return commands

    else:
        return []
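
The helper returns a list of keyword-argument dicts instead of creating nodes itself; a plausible way the caller turns them into actual copy steps (a sketch under that assumption, with a hypothetical copy folder):

# Assuming `env` is the relevant construction environment (e.g. conf.env).
for kw in create_gsl_cpy_commands(conf, dependencies, "install/gsl_libs"):
    env.Command(**kw)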
Example #7
def generate(env):
    if env.WhereIs("uglifyjs"):
        action = "uglifyjs --no-copyright --output $TARGET $SOURCE"
    elif env.WhereIs("yui-compressor"):
        action = "yui-compressor -o $TARGET $SOURCE"
    else:
        action = Copy("$TARGET", "$SOURCE")
    bld = Builder(action=action, suffix=".min.js", src_suffix=".js")
    env.Append(BUILDERS={"MinifyJS": bld})
Example #8
def init_wheel(env):
    """
    Create a wheel and its metadata using Environment env.
    """
    env["PACKAGE_NAMEVER"] = "-".join(
        (env["PACKAGE_NAME_SAFE"], env["PACKAGE_VERSION"]))

    wheel_filename = "-".join(
        (env["PACKAGE_NAMEVER"], env["WHEEL_TAG"])) + ".whl"
    wheel_target_dir = env.Dir(env["WHEEL_DIR"])

    # a leading '#' in the path means it's relative to the top-level SConstruct
    env["WHEEL_PATH"] = env.get("WHEEL_PATH", env.Dir("#build/wheel/"))
    env["DIST_INFO_NAME"] = env["PACKAGE_NAMEVER"] + ".dist-info"

    env["DIST_INFO_PATH"] = env["WHEEL_PATH"].Dir(env["PACKAGE_NAME_SAFE"] +
                                                  "-" +
                                                  env["PACKAGE_VERSION"] +
                                                  ".dist-info")
    env["WHEEL_DATA_PATH"] = env["WHEEL_PATH"].Dir(env["PACKAGE_NAME_SAFE"] +
                                                   "-" +
                                                   env["PACKAGE_VERSION"] +
                                                   ".data")

    # used by prepare_metadata_for_build_wheel
    dist_info = env.Install(env.Dir(env["WHEEL_DIR"]), env["DIST_INFO_PATH"])
    env.Alias("dist_info", dist_info)

    env["WHEEL_FILE"] = env.Dir(wheel_target_dir).File(wheel_filename)

    # Write WHEEL and METADATA
    targets = wheel_metadata(env)

    # experimental PEP517-style editable
    # with filename that won't collide with our real wheel (SCons wouldn't like that)
    editable_filename = ("-".join(
        (env["PACKAGE_NAMEVER"], "ed." + env["WHEEL_TAG"])) + ".whl")
    editable = env.Zip(
        target=env.Dir(env["WHEEL_DIR"]).File(editable_filename),
        source=env["DIST_INFO_PATH"],
        ZIPROOT=env["WHEEL_PATH"],
    )
    env.Alias("editable", editable)
    env.NoClean(editable)
    env.AddPostAction(editable, Action(add_editable))
    env.AddPostAction(editable, Action(add_manifest))

    editable_dist_info = env.Dir(
        "#build/editable/${PACKAGE_NAMEVER}.dist-info")
    # editable may need an extra dependency, so it gets its own dist-info directory.
    env.Command(editable_dist_info, env["DIST_INFO_PATH"],
                Copy("$TARGET", "$SOURCE"))

    metadata2 = env.Command(editable_dist_info.File("METADATA"),
                            metadata_source(env), metadata_builder)

    return targets
Example #9
def copy_tilebus_definitions(tile):
    destdir = os.path.join('build', 'output', 'tilebus')

    env = Environment(tools=[])
    for tbdef in tile.find_products('tilebus_definitions'):
        tbname = os.path.basename(tbdef)

        infile = tbdef
        outfile = os.path.join(destdir, tbname)
        env.Command([outfile], [infile], Copy("$TARGET", "$SOURCE"))
Example #10
def copy_linker_scripts(tile):
    destdir = os.path.join('build', 'output', 'linker')

    linkers = tile.find_products('linker_script')
    env = Environment(tools=[])

    for linker in linkers:
        linkername = os.path.basename(linker)
        srcfile = os.path.join("firmware", 'linker', linkername)
        destfile = os.path.join(destdir, linkername)

        env.Command([destfile], [srcfile], Copy("$TARGET", "$SOURCE"))
Example #11
def generate(env):
    env.Append(CPPPATH=[sysconfig.get_python_inc()])
    env.Append(LIBPATH=[sysconfig.get_config_var('LIBDIR')])
    # LIBS = ['python' + sysconfig.get_config_var('VERSION')] # only on CPython; ask distutils

    compiler = distutils.ccompiler.new_compiler()
    distutils.sysconfig.customize_compiler(compiler)
    if isinstance(compiler, distutils.unixccompiler.UnixCCompiler):
        env.MergeFlags(' '.join(compiler.compiler_so[1:]))
        # XXX other flags are revealed in compiler
    # XXX MSVC works differently

    env['PACKAGE_NAME'] = env['PACKAGE_METADATA']['name']
    env['PACKAGE_NAME_SAFE'] = normalize_package(env['PACKAGE_NAME'])
    env['PACKAGE_VERSION'] = env['PACKAGE_METADATA']['version']

    # Development .egg-info has no version number. Needs to have
    # underscore _ and not hyphen -
    env['EGG_INFO_PATH'] = env['PACKAGE_NAME_SAFE'] + '.egg-info'

    # all files under this directory will be packaged as a wheel
    env['WHEEL_PATH'] = env.Dir('#build/wheel/')

    # this distutils command helps trick setuptools into doing work for us
    command = Command(Distribution(env['PACKAGE_METADATA']))
    egg_info = env.Command(egg_info_targets(env), 'pyproject.toml',
                           egg_info_builder)
    env['DUMMY_COMMAND'] = command

    env.Clean(egg_info, env['EGG_INFO_PATH'])

    env.Alias('egg_info', egg_info)

    metadata = env.Command('METADATA', 'pyproject.toml', metadata_builder)

    pkg_info = env.Command('PKG-INFO',
                           egg_info_targets(env)[0].get_path(),
                           Copy('$TARGET', '$SOURCE'))

    # XXX switch to using FindInstalledFiles() or another collector, so random files
    # in build directory won't wind up in the archive.
    # XXX is this relative to the calling file?
    whl = env.Zip(target='-'.join(
        (env['PACKAGE_NAME_SAFE'], env['PACKAGE_VERSION'], env['WHEEL_TAG'])) +
                  '.whl',
                  source=env['WHEEL_PATH'],
                  ZIPROOT=env['WHEEL_PATH'])

    env.AddPostAction(whl, Action(add_manifest))

    env.Clean(whl, env['WHEEL_PATH'])

    return
Example #12
def copy_dependency_docs(tile):
    """Copy all documentation from dependencies into build/output/doc folder"""

    env = Environment(tools=[])

    outputbase = os.path.join('build', 'output', 'doc')
    depbase = os.path.join('build', 'deps')
    for dep in tile.dependencies:
        depdir = os.path.join(depbase, dep['unique_id'], 'doc',
                              dep['unique_id'])
        outputdir = os.path.join(outputbase, dep['unique_id'])

        if os.path.exists(depdir):
            env.Command([outputdir], [depdir], Copy("$TARGET", "$SOURCE"))
Example #13
    def __call__(self, target, source, env):

        logger.trace("target = [{}]".format(
            colour_items([str(node) for node in target])))
        logger.trace("source = [{}]".format(
            colour_items([str(node) for node in source])))

        for html_report_src_tgt, json_report_src_tgt in zip(
                *[iter(zip(source, target))] * 2):

            html_report = html_report_src_tgt[0]
            json_report = json_report_src_tgt[0]

            html_target = html_report_src_tgt[1]
            json_target = json_report_src_tgt[1]

            logger.trace("html_report = [{}]".format(
                as_notice(str(html_report))))
            logger.trace("json_report = [{}]".format(as_info(
                str(json_report))))
            logger.trace("html_target = [{}]".format(
                as_notice(str(html_target))))
            logger.trace("json_target = [{}]".format(as_info(
                str(json_target))))

            # TODO: Check use of destination as it is currently unused
            destination = env['abs_final_dir']
            if self._destination:
                destination = self._destination + destination_subdir(env)

            logger.trace("report_summary = {}".format(
                str(self._read(str(json_report)))))

            env.Execute(Copy(html_target, html_report))
            env.Execute(Copy(json_target, json_report))

        return None
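
The zip(*[iter(zip(source, target))] * 2) expression walks the interleaved source/target lists two pairs at a time, so each iteration sees one HTML report and its matching JSON report. A standalone sketch of the same grouping idiom, with illustrative values:

pairs = list(zip(["a.html", "a.json", "b.html", "b.json"],
                 ["out/a.html", "out/a.json", "out/b.html", "out/b.json"]))
for html_src_tgt, json_src_tgt in zip(*[iter(pairs)] * 2):
    print(html_src_tgt, json_src_tgt)
# ('a.html', 'out/a.html') ('a.json', 'out/a.json')
# ('b.html', 'out/b.html') ('b.json', 'out/b.json')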
Example #14
 def package(self):
     self.env.Substfile(self.outdir.File("AndroidManifest.xml"),
                        self.template_dir.File("AndroidManifest.xml"),
                        SUBST_DICT=self.params)
     self.env.Command(self.outdir.File("project.properties"),
                      self.template_dir.File("project.properties"),
                      Copy("$TARGET", "$SOURCE"))
     custom_rules = [
         '<?xml version="1.0" encoding="UTF-8"?>',
         '<project name="custom_rules" default="help">',
         '<property file="{}"/>'.format(
             os.path.join('..', '..', '..', '..', '..', '..', 'src', 'java',
                          'android', 'service',
                          'signing.properties')), '</project>'
     ]
     self.env.Textfile(self.outdir.File("custom_rules.xml"), custom_rules)
     self.env.Command(self.outdir.Dir("src"), [], Mkdir("$TARGET"))
     for res_subpath in os.listdir(self.res_template_dir.abspath):
         if res_subpath.startswith("values") and os.path.isdir(
                 os.path.join(self.res_template_dir.abspath, res_subpath)):
             filename = "{__language__}-{__type__}.xml".format(
                 **self.params)
             infile = self.res_template_dir.Dir(res_subpath).File(filename)
             outfile = self.res_outdir.Dir(res_subpath).File(filename)
             if os.path.isfile(infile.abspath):
                 self.env.Substfile(outfile, infile, SUBST_DICT=self.params)
     for res_subpath in os.listdir(self.main_pkg_res_dir.abspath):
         if res_subpath.startswith("drawable") and os.path.isdir(
                 os.path.join(self.main_pkg_res_dir.abspath, res_subpath)):
             filename = "ic_launcher.png"
             infile = self.main_pkg_res_dir.Dir(res_subpath).File(filename)
             outfile = self.res_outdir.Dir(res_subpath).File(filename)
             if os.path.isfile(infile.abspath):
                 self.env.Command(outfile, infile,
                                  Copy("$TARGET", "$SOURCE"))
     self.arch.package()
Example #15
def autobuild_bootstrap_file(file_name, image_list):
    """Combine multiple firmware images into a single bootstrap hex file.

    The files listed in image_list must be products of either this tile or any
    dependency tile and should correspond exactly with the base name listed in
    the products section of the module_settings.json file of the corresponding
    tile.  They must be listed as firmware_image type products.

    This function keeps a global map of all of the intermediate files that it
    has had to create so that we don't try to build them multiple times.

    Args:
        file_name(str): Full name of the output bootstrap hex file.
        image_list(list of str): List of files that will be combined into a
            single hex file that will be used to flash a chip.
    """

    family = utilities.get_family('module_settings.json')
    target = family.platform_independent_target()
    resolver = ProductResolver.Create()

    env = Environment(tools=[])

    output_dir = target.build_dirs()['output']
    build_dir = target.build_dirs()['build']

    build_output_name = os.path.join(build_dir, file_name)
    full_output_name = os.path.join(output_dir, file_name)

    processed_input_images = []

    for image_name in image_list:
        image_info = resolver.find_unique('firmware_image', image_name)
        image_path = image_info.full_path

        hex_path = arm.ensure_image_is_hex(image_path)
        processed_input_images.append(hex_path)

    env.Command(
        build_output_name,
        processed_input_images,
        action=Action(
            arm.merge_hex_executables,
            "Merging %d hex files into $TARGET" % len(processed_input_images)))
    env.Command(full_output_name, build_output_name,
                Copy("$TARGET", "$SOURCE"))
Example #16
def copy_dependency_images(tile):
    """Copy all documentation from dependencies into build/output/doc folder"""

    env = Environment(tools=[])

    outputbase = os.path.join('build', 'output')
    depbase = os.path.join('build', 'deps')
    for dep in tile.dependencies:
        depdir = os.path.join(depbase, dep['unique_id'])
        outputdir = os.path.join(outputbase)

        deptile = IOTile(depdir)

        for image in deptile.find_products('firmware_image'):
            name = os.path.basename(image)
            input_path = os.path.join(depdir, name)
            output_path = os.path.join(outputdir, name)
            env.Command([output_path], [input_path],
                        Copy("$TARGET", "$SOURCE"))
Example #17
def autobuild_release(family=None):
    """Copy necessary files into build/output so that this component can be used by others

    Args:
        family (ArchitectureGroup): The architecture group that we are targeting.  If not
            provided, it is assumed that we are building in the current directory and the
            module_settings.json file is read to create an ArchitectureGroup
    """

    if family is None:
        family = utilities.get_family('module_settings.json')

    env = Environment(tools=[])
    env['TILE'] = family.tile

    target = env.Command(['#build/output/module_settings.json'],
                         ['#module_settings.json'],
                         action=env.Action(create_release_settings_action,
                                           "Creating release manifest"))
    env.AlwaysBuild(target)

    # Copy over release notes if they exist
    if os.path.exists('RELEASE.md'):
        env.Command(['build/output/RELEASE.md'], ['RELEASE.md'],
                    Copy("$TARGET", "$SOURCE"))

    # Now copy across the build products that are not copied automatically
    copy_include_dirs(family.tile)
    copy_tilebus_definitions(family.tile)
    copy_dependency_docs(family.tile)
    copy_linker_scripts(family.tile)

    # Allow users to specify a hide_dependency_images flag that skips copying dependency firmware images
    if not family.tile.settings.get('hide_dependency_images', False):
        copy_dependency_images(family.tile)

    copy_extra_files(family.tile)
    build_python_distribution(family.tile)
Example #18
def copy_include_dirs(tile):
    """Copy all include directories that this tile defines as products in build/output/include
    """

    if 'products' not in tile.settings:
        return

    incdirs = tile.settings['products'].get('include_directories', [])
    incdirs = map(lambda x: os.path.normpath(utilities.join_path(x)), incdirs)
    incdirs = sorted(incdirs, key=lambda x: len(x))

    seen_dirs = pygtrie.PrefixSet(
        factory=lambda: pygtrie.StringTrie(separator=os.path.sep))

    env = Environment(tools=[])

    # all include directories are relative to the firmware/src directory
    outputbase = os.path.join('build', 'output', 'include')
    inputbase = os.path.join('firmware', 'src')
    for inc in incdirs:
        if inc in seen_dirs:
            continue

        relinput = os.path.join(inputbase, inc)
        finaldir = os.path.join(outputbase, inc)

        for folder, subdirs, filenames in os.walk(relinput):
            relfolder = os.path.relpath(folder, relinput)
            for filename in filenames:
                if filename.endswith(".h"):
                    infile = os.path.join(folder, filename)
                    outfile = os.path.join(finaldir, relfolder, filename)
                    env.Command([outfile], [infile],
                                Copy("$TARGET", "$SOURCE"))

        seen_dirs.add(inc)
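
The PrefixSet over a path-separated StringTrie is what lets the loop skip include directories nested inside one that has already been copied (the shortest-first sort guarantees parents are processed before children). A small sketch of that behaviour, assuming pygtrie's documented PrefixSet semantics and hypothetical directory names:

import os
import pygtrie

seen = pygtrie.PrefixSet(factory=lambda: pygtrie.StringTrie(separator=os.path.sep))
seen.add('drivers')
print(os.path.join('drivers', 'usb') in seen)   # True  -> nested dir is skipped
print('protocol' in seen)                       # False -> still gets copied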
Example #19
def build_python_distribution(tile):
    """Gather support/data files to create python wheel distribution."""
    env = Environment(tools=[])

    builddir = os.path.join('build', 'python')
    packagedir = os.path.join(builddir, tile.support_distribution)
    outdir = os.path.join('build', 'output', 'python')
    outsrcdir = os.path.join(outdir, 'src')

    if not tile.has_wheel:
        return

    buildfiles = []
    datafiles = []

    pkg_init = os.path.join(packagedir, '__init__.py')

    # Make sure we always clean up the temporary python directory that we are creating
    # so that there are no weird build issues
    env.Command(pkg_init, [], [
        Delete(builddir),
        Mkdir(builddir),
        Mkdir(packagedir),
        Touch(pkg_init)
        ])

    # Make sure build/output/python/src exists.
    # We create a .timestamp placeholder file so that other build steps can
    # depend on this directory, and we always delete the directory first so
    # that everything is re-cleaned and no stale files remain.
    outsrcnode = env.Command(os.path.join(outsrcdir, ".timestamp"), [], [
        Delete(outsrcdir),
        Mkdir(outsrcdir),
        Touch(os.path.join(outsrcdir, ".timestamp"))])


    for outpath, inpath in iter_support_files(tile):
        outfile = os.path.join(outsrcdir, outpath)
        buildfile = os.path.join(packagedir, outpath)

        target = env.Command(outfile, inpath, Copy("$TARGET", "$SOURCE"))
        env.Depends(target, outsrcnode)

        target = env.Command(buildfile, inpath, Copy("$TARGET", "$SOURCE"))
        env.Depends(target, pkg_init)

        if 'data' in os.path.normpath(buildfile).split(os.sep):
            datafile = os.path.join(tile.support_distribution, outpath)
            datafiles.append(datafile)

        buildfiles.append(buildfile)

    # Create setup.py file and then use that to build a python wheel and an sdist
    env['TILE'] = tile
    env['datafiles'] = datafiles
    support_sdist = "%s-%s.tar.gz" % (tile.support_distribution, tile.parsed_version.pep440_string())
    wheel_output = os.path.join('build', 'python', 'dist', tile.support_wheel)
    sdist_output = os.path.join('build', 'python', 'dist', support_sdist)

    env.Clean(os.path.join(outdir, tile.support_wheel), os.path.join('build', 'python'))
    env.Command([os.path.join(builddir, 'setup.py'), os.path.join(builddir, 'MANIFEST.in'),
                wheel_output], ['module_settings.json'] + buildfiles,
                action=Action(generate_setup_and_manifest, "Building python distribution"))

    env.Depends(sdist_output, wheel_output)
    env.Command([os.path.join(outdir, tile.support_wheel)], [wheel_output], Copy("$TARGET", "$SOURCE"))
    env.Command([os.path.join(outdir, support_sdist)], [sdist_output], Copy("$TARGET", "$SOURCE"))

    # Copy over all dependency wheels as well
    wheels = find_dependency_wheels(tile)

    if "python_universal" in tile.settings:
        required_version = "py2.py3"
    else:
        required_version = "py3" if sys.version_info[0] == 3 else "py2"

    for wheel in wheels:
        wheel_name = os.path.basename(wheel)
        wheel_basename = '-'.join(wheel.split('-')[:-3])
        wheel_pattern = wheel_basename + "-*" + required_version + '*'

        wheel_source = glob.glob(wheel_pattern)
        wheel_real = glob.glob(wheel_basename + '*')

        if wheel_source:
            env.Command([os.path.join(outdir, wheel_name)], [wheel_source[0]], Copy("$TARGET", "$SOURCE"))
        else:
            print("This package is set up to require", required_version)
            print("Dependency version appears to be", wheel_real[0].split('/')[-1])
            raise BuildError("dependent wheel not built with compatible python version")
Example #20
def build_app(target, source, env):
    """
    Build App.

    PLUGINS - a list of plugins to install; as a feature/hack/bug (inspired by
    Qt, but probably needed by other libs) you can pass a tuple where the first
    element is the file/node and the second is the folder under PlugIns/ that
    you want it installed to.
    """
    # TODO: make it strip(1) the installed binary (saves about 1Mb)

    # EEEP: this code is pretty flakey because I can't figure out how to force;
    # have asked the scons list about it

    # This doesn't handle Frameworks correctly, only .dylibs
    # useful to know:
    # http://developer.apple.com/documentation/MacOSX/Conceptual/BPFrameworks/
    # Concepts/FrameworkAnatomy.html#//apple_ref/doc/uid/20002253
    # ^ so you do have to copy in an _entire_ framework to be sure...
    # but for some frameworks it's okay to pretend they are regular

    bundle = target[0]
    binary = source[0]

    # this is copied from emit_app, which is unfortunate
    contents = Dir(os.path.join(str(bundle), "Contents"))
    MacOS = Dir(os.path.join(str(contents), "MacOS/"))
    frameworks = Dir(
        os.path.join(str(contents), "Frameworks")
    )  # we put both frameworks and standard unix sharedlibs in here
    plugins = Dir(os.path.join(str(contents), "PlugIns"))

    # installed_bin = source[-1] #env['APP_INSTALLED_BIN']
    installed_bin = os.path.join(str(MacOS), os.path.basename(str(binary)))

    strip = bool(env.get("STRIP", False))

    otool_local_paths = env.get("OTOOL_LOCAL_PATHS", [])
    otool_system_paths = env.get("OTOOL_SYSTEM_PATHS", [])

    # TODO: expose the ability to override the list of System dirs
    # ugh, I really don't like this... I wish I could package it up nicer. I
    # could use a Builder but then I would have to pass in to the builder
    # installed_bin which seems backwards since

    # could we use patch_lib on the initial binary itself????

    def embed_lib(abs):
        """
        Get the path to embed library abs in the bundle.
        """
        name = os.path.basename(abs)
        return os.path.join(str(frameworks), name)

    def relative(emb):
        """
        Compute the path of the given embedded binary relative to the binary.

        (i.e. @executable_path/../+...)
        """
        # assume that we start in X.app/Contents/, since we know necessarily
        # that @executable_path/../ gives us that so then we only need
        base = os.path.abspath(str(installed_bin))
        emb = os.path.abspath(emb)  # XXX is abspath really necessary?
        # the path from Contents/ down to the file. Since we are taking away
        # the length of the common prefix we are left with only what is unique
        # to the embedded library's path
        down = emb[len(os.path.commonprefix([base, emb])):]
        return os.path.join("@executable_path/../", down)

    # todo: precache all this shit, in case we have to change the install names
    # of a lot of libraries

    def automagic_references(embedded):  # XXX bad name
        "modify a binary file to patch up all it's references"

        for ref in otool.dependencies(embedded):
            if ref in locals:
                embd = locals[ref][
                    1]  # the path that this reference is getting embedded at
                otool.change_ref(str(embedded), ref, relative(embd))

    def patch_lib(embedded):
        otool.change_id(
            embedded,
            relative(embedded))  # change the name the library knows itself as
        automagic_references(embedded)
        if strip:
            # XXX stripping seems to only work on libs compiled a certain way,
            # todo: try out ALL the options, see if can adapt it to work on
            # every sort of lib
            system("strip -S '%s' 2>/dev/null" % embedded)
            # (the stripping fails with ""symbols referenced by relocation
            # entries that can't be stripped"" for some obscure Apple-only
            # reason sometimes, related to their hacks to gcc---it depends on
            # how the file was compiled; since we don't /really/ care about
            # this we just let it silently fail)

    # Workarounds for a bug/feature in SCons such that it doesn't necessarily
    # run the source builders before the target builders (wtf scons??)
    Execute(Mkdir(contents))
    Execute(Mkdir(MacOS))
    Execute(Mkdir(frameworks))
    Execute(Mkdir(plugins))

    # XXX locals should be keyed by absolute path to the lib, not by reference;
    # that way it's easy to tell when a lib referenced in two different ways is
    # actually the same XXX rename locals => embeds
    # precache the list of names of libs we are using so we can figure out if a
    # lib is local or not (and therefore a ref to it needs to be updated)

    # XXX it seems kind of wrong to only look at the basename (even if, by the
    # nature of libraries, that must be enough) but there is no easy way to
    # compute the abspath

    locals = {}
    # [ref] => (absolute_path, embedded_path) (ref is the original reference
    # from looking at otool -L; we use this to decide if two libs are the same)

    # XXX it would be handy if embed_dependencies returned the otool list for
    # each ref it reads..
    binary_rpaths = otool.rpaths(str(binary))
    otool_local_paths = binary_rpaths + otool_local_paths
    for ref, path in otool.embed_dependencies(str(binary),
                                              LOCAL=otool_local_paths,
                                              SYSTEM=otool_system_paths):
        locals[ref] = (path, embed_lib(path))

    # XXX bad name; list of tuples (source, embed) of plugins to stick under
    # the plugins/ dir
    plugins_l = []
    for p in env["PLUGINS"]:  # build any necessary dirs for plugins (siiiigh)
        embedded_p = os.path.join(str(plugins), os.path.basename(str(p)))
        plugins_l.append((str(p), embedded_p))

    for subdir, p in env["QT_HACK"]:
        Execute(Mkdir(os.path.join(str(plugins), subdir)))
        embedded_p = os.path.join(str(plugins), subdir,
                                  os.path.basename(str(p)))
        plugins_l.append((p, embedded_p))

    print("Scanning plugins for new dependencies:")
    for p, ep in plugins_l:
        print("Scanning plugin", p)
        for ref, path in otool.embed_dependencies(p,
                                                  LOCAL=otool_local_paths,
                                                  SYSTEM=otool_system_paths):
            if ref not in locals:
                locals[ref] = path, embed_lib(path)
            else:
                assert path == locals[ref][0], "Path '%s' is not '%s'" % (
                    path,
                    locals[ref][0],
                )

    # we really should have a libref-to-abspath function somewhere... right now
    # it's inline in embed_dependencies(). Better yet, make a Frameworks type
    # so that you can say Framework("QtCore") and then use that as a dependency
    print("Installing main binary:")
    Execute(Copy(installed_bin, binary))
    # e.g. this SHOULD be an env.Install() call, but if scons decides to run
    # build_app before that env.Install then build_app fails and brings the
    # rest of the build with it, of course
    for ref in otool.dependencies(str(installed_bin)):
        if ref in locals:
            embedded = locals[ref][1]
            otool.change_ref(
                str(installed_bin), ref, relative(embedded)
            )  # change the reference to the library in the program binary
    if strip:
        system("strip '%s'" % installed_bin)

    print("Installing embedded libs:")
    for ref, (abs, embedded) in locals.items():
        real_abs = os.path.realpath(abs)
        print("installing", real_abs, "to", embedded)
        # NOTE(rryan): abs can be a symlink. we want to copy the binary it is
        # pointing to. os.path.realpath does this for us.
        Execute(Copy(embedded, real_abs))
        if not os.access(embedded, os.W_OK):
            print("Adding write permissions to %s" % embedded_p)
            mode = os.stat(embedded).st_mode
            os.chmod(embedded, mode | stat.S_IWUSR)
        patch_lib(embedded)

    print("Installing plugins:")
    for p, embedded_p in plugins_l:
        real_p = os.path.realpath(p)
        print("installing", real_p, "to", embedded_p)
        # NOTE(rryan): p can be a symlink. we want to copy the binary it is
        # pointing to. os.path.realpath does this for us.
        Execute(Copy(embedded_p, real_p))  # :/
        patch_lib(str(embedded_p))
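
The comments above mention an emit_app emitter and the wish to wrap this in a Builder; a hedged sketch of what that wiring could look like (emit_app's existence and signature, plus the target/source names, are assumptions based only on those comments):

# Sketch only: build_app reads PLUGINS and QT_HACK from the environment.
app_builder = Builder(action=build_app, emitter=emit_app)
env.Append(BUILDERS={"App": app_builder})
env.App("MyApp.app", "build/myapp_binary", PLUGINS=[], QT_HACK=[])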
Example #21
def autobuild_shiparchive(src_file):
    """Create a ship file archive containing a yaml_file and its dependencies.

    If yaml_file depends on any build products as external files, it must
    be a jinja2 template that references the file using the find_product
    filter so that we can figure out where those build products are going
    and create the right dependency graph.

    Args:
        src_file (str): The path to the input yaml file template.  This
            file path must end in .yaml.tpl and is rendered into a .yaml
            file and then packaged into a .ship file along with any
            products that are referenced in it.
    """

    if not src_file.endswith('.tpl'):
        raise BuildError("You must pass a .tpl file to autobuild_shiparchive",
                         src_file=src_file)

    env = Environment(tools=[])

    family = ArchitectureGroup('module_settings.json')
    target = family.platform_independent_target()
    resolver = ProductResolver.Create()

    # Parse through build_step products to see what needs to be imported
    custom_steps = []
    for build_step in family.tile.find_products('build_step'):
        full_file_name = build_step.split(":")[0]
        basename = os.path.splitext(os.path.basename(full_file_name))[0]
        folder = os.path.dirname(full_file_name)

        fileobj, pathname, description = imp.find_module(basename, [folder])
        mod = imp.load_module(basename, fileobj, pathname, description)
        full_file_name, class_name = build_step.split(":")
        custom_steps.append((class_name, getattr(mod, class_name)))
    env['CUSTOM_STEPS'] = custom_steps

    env["RESOLVER"] = resolver

    base_name, tpl_name = _find_basename(src_file)
    yaml_name = tpl_name[:-4]
    ship_name = yaml_name[:-5] + ".ship"

    output_dir = target.build_dirs()['output']
    build_dir = os.path.join(target.build_dirs()['build'], base_name)
    tpl_path = os.path.join(build_dir, tpl_name)
    yaml_path = os.path.join(build_dir, yaml_name)
    ship_path = os.path.join(build_dir, ship_name)
    output_path = os.path.join(output_dir, ship_name)

    # We want to build up all related files in
    # <build_dir>/<ship archive_folder>/
    # - First copy the template yaml over
    # - Then render the template yaml
    # - Then find all products referenced in the template yaml and copy
    #   them over
    # - Then build a .ship archive
    # - Then copy that archive into output_dir

    ship_deps = [yaml_path]

    env.Command([tpl_path], [src_file], Copy("$TARGET", "$SOURCE"))

    prod_deps = _find_product_dependencies(src_file, resolver)

    env.Command([yaml_path], [tpl_path],
                action=Action(template_shipfile_action, "Rendering $TARGET"))

    for prod in prod_deps:
        dest_file = os.path.join(build_dir, prod.short_name)
        ship_deps.append(dest_file)
        env.Command([dest_file], [prod.full_path], Copy("$TARGET", "$SOURCE"))

    env.Command([ship_path], [ship_deps],
                action=Action(create_shipfile,
                              "Archiving Ship Recipe $TARGET"))
    env.Command([output_path], [ship_path], Copy("$TARGET", "$SOURCE"))
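
A minimal usage sketch, following the .yaml.tpl requirement enforced above (the path is hypothetical):

autobuild_shiparchive(os.path.join('shipping', 'factory_flash.yaml.tpl'))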