Example #1
0
def copyNativeLibs(config):
    """
    Copy the compiled native J2V8 library (.dll/.dylib/.so) into the Java resources tree
    for inclusion into the later built Java JAR.
    """
    platform_cmake_out = config.inject_env(cmake_out_dir)

    if (utils.is_win32(config.platform)):
        platform_cmake_out += "Debug/" if hasattr(config, 'debug') and config.debug else "Release/"

    lib_pattern = config.inject_env(platform_cmake_out + "*j2v8-*$FILE_ABI.$LIB_EXT")
    platform_lib_path = glob.glob(lib_pattern)

    if (len(platform_lib_path) == 0):
        utils.cli_exit("ERROR: Could not find native library for inclusion in platform target package")

    platform_lib_path = platform_lib_path[0]

    copy_cmds = []

    lib_target_path = None
    if (utils.is_android(config.platform)):
        lib_target_path = config.inject_env("src/main/jniLibs/$FILE_ABI") # directory path
        copy_cmds += mkdir(lib_target_path)
        lib_target_path += "/libj2v8.so" # final lib file path
    else:
        lib_target_path = "src/main/resources/"

    print "Copying native lib from: " + platform_lib_path + " to: " + lib_target_path

    copy_cmds += cp(platform_lib_path + " " + lib_target_path)

    return copy_cmds
Example #2
0
    def cross_compiler(self, cross_host_name):
        """Look up the cross-compiler factory registered for the given host name and instantiate it."""
        factory = self.cross_compilers.get(cross_host_name)

        # a missing registration is an internal error, not a user mistake
        if (not factory):
            utils.cli_exit("ERROR: internal error while looking for cross-compiler: " + cross_host_name)

        # the registry stores callables; invoke to obtain the compiler instance
        return factory()
Example #3
0
def copyNativeLibs(config):
    """
    Copy the compiled native J2V8 library (.dll/.dylib/.so) into the Java resources tree
    for inclusion into the later built Java JAR.

    Returns the list of shell commands that perform the copy (plus any mkdir).
    """
    platform_cmake_out = config.inject_env(cmake_out_dir)

    # Windows CMake builds emit into per-configuration sub-directories
    if (utils.is_win32(config.platform)):
        platform_cmake_out += "Debug/" if hasattr(config, 'debug') and config.debug else "Release/"

    # glob for the platform/ABI specific library file produced by the native build
    lib_pattern = config.inject_env(platform_cmake_out + "*j2v8-*$FILE_ABI.$LIB_EXT")
    platform_lib_path = glob.glob(lib_pattern)

    if (len(platform_lib_path) == 0):
        utils.cli_exit("ERROR: Could not find native library for inclusion in platform target package")

    # the pattern is expected to match a single file; use the first hit
    platform_lib_path = platform_lib_path[0]

    copy_cmds = []

    lib_target_path = None
    if (utils.is_android(config.platform)):
        lib_target_path = config.inject_env("src/main/jniLibs/$FILE_ABI") # directory path
        copy_cmds += mkdir(lib_target_path)
        lib_target_path += "/libj2v8.so" # final lib file path
    else:
        lib_target_path = "src/main/resources/"

    print "Copying native lib from: " + platform_lib_path + " to: " + lib_target_path

    copy_cmds += cp(platform_lib_path + " " + lib_target_path)

    return copy_cmds
Example #4
0
 def health_check(self, config):
     """Probe the host shell ("ver" on win32, "bash --version" elsewhere) to verify it is usable."""
     try:
         if utils.is_win32(config.platform):
             probe_cmd = "ver"
         else:
             probe_cmd = "bash --version"
         self.exec_cmd(probe_cmd, config)
     except subprocess.CalledProcessError:
         utils.cli_exit("ERROR: Failed Shell build-system health check!")
Example #5
0
 def health_check(self, config):
     """Verify that the Vagrant tooling is installed and responding on the host."""
     print "Verifying Vagrant build-system status..."
     try:
         # a non-zero exit code of the version probe means Vagrant is unusable
         self.exec_host_cmd("vagrant --version", config)
     except subprocess.CalledProcessError:
         utils.cli_exit(
             "ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!"
         )
Example #6
0
    def cross_compiler(self, cross_host_name):
        """Return an instance of the cross-compiler registered under cross_host_name."""
        compiler = self.cross_compilers.get(cross_host_name)

        # a missing registration is an internal error, not a user mistake
        if (not compiler):
            utils.cli_exit(
                "ERROR: internal error while looking for cross-compiler: " +
                cross_host_name)

        # the registry stores callables; invoke to obtain the compiler instance
        return compiler()
Example #7
0
def run_interactive_cli():
    """
    Interactively pick one of the pre-defined build-configurations (index taken
    from sys.argv when present, otherwise prompted), optionally override its
    build-steps, and then start the build.
    """
    # list all available pre-defined build-configurations
    for idx, cfg in enumerate(bcfg.configs):
        print("[" + str(idx) + "] " + cfg.get("name"))
    print  # newline

    # NOTE: argv[1] usually should be -i, therefore we need to consider this arg in all checks
    base_arg_count = 2

    sel_index = \
          int(sys.argv[base_arg_count]) \
          if len(sys.argv) > base_arg_count \
          else input("Select a predefined build-configuration to run: ")

    # valid indices are 0 ... len-1; rejecting len itself avoids an
    # IndexError below (the previous check wrongly accepted sel_index == len)
    if not isinstance(sel_index,
                      int) or sel_index < 0 or sel_index >= len(bcfg.configs):
        utils.cli_exit(
            "ERROR: Must enter a valid test index in the range [0 ... " +
            str(len(bcfg.configs) - 1) + "]")

    selected_build_cfg = bcfg.configs[sel_index]

    print("Building: " + selected_build_cfg.get("name"))
    print  # newline

    build_params = selected_build_cfg.get("params")

    # use build-steps from sys.argv or alternatively ask the user
    build_steps_argv = \
          sys.argv[base_arg_count + 1:] \
          if len(sys.argv) > base_arg_count + 1 \
          else shlex.split(raw_input("Override build-steps ? (leave empty to run pre-configured steps): "))

    # create a parser that only expects the build-step args
    parser = cli.get_blank_parser()
    cli.init_build_steps(parser)

    # parse the build-step syntax
    user_params = parser.parse_args(build_steps_argv)

    # convert into dictionary form
    user_params = vars(user_params)

    # merge the potentially customized build-steps into the
    # original pre-defined build-config params
    # see: https://stackoverflow.com/a/15277395/425532
    build_params.update(
        (k, v) for k, v in user_params.iteritems() if v is not None)

    # start the build
    bex.execute_build(build_params)
Example #8
0
    def health_check(self, config):
        """
        Verify that Docker is installed, reachable and currently running the
        container platform (linux vs. windows) required by the build target.
        """
        print "Verifying Docker build-system status..."
        try:
            # general docker availability check
            self.exec_host_cmd("docker --version", config)

            # check the currently active container technology (linux vs. windows containers)
            # NOTE: the additional newlines are important for the regex matching
            version_str = utils.execute_to_str("docker version") + "\n\n"

            # capture everything between "Server:" and the first blank line
            server_match = re.search(r"Server:(.*)\n\n", version_str + "\n\n", re.DOTALL)

            if (server_match is None or server_match.group(1) is None):
                utils.cli_exit("ERROR: Unable to determine docker server version from version string: \n\n" + version_str)

            # extract the server "OS/Arch" line from the captured server section
            version_match = re.search(r"^ OS/Arch:\s+(.*)$", server_match.group(1), re.MULTILINE)

            if (version_match is None):
                utils.cli_exit("ERROR: Unable to determine docker server platform from version string: \n\n" + version_str)

            docker_version = version_match.group(1)

            docker_req_platform = "windows" if utils.is_win32(config.platform) else "linux"

            # check if the docker engine is running the expected container platform (linux or windows)
            if (docker_req_platform not in docker_version):
                utils.cli_exit("ERROR: docker server must be using " + docker_req_platform + " containers, instead found server version using: " + docker_version)

        except subprocess.CalledProcessError:
            utils.cli_exit("ERROR: Failed Docker build-system health check, make sure Docker is available and running!")
Example #9
0
def run_interactive_cli():
      """
      Interactively pick one of the pre-defined build-configurations (index taken
      from sys.argv when present, otherwise prompted), optionally override its
      build-steps, and then start the build.
      """
      # list all available pre-defined build-configurations
      idx = 0
      for cfg in bcfg.configs:
            print ("[" + str(idx) + "] " + cfg.get("name"))
            idx += 1
      print # newline

      # NOTE: argv[1] usually should be -i, therefore we need to consider this arg in all checks
      base_arg_count = 2

      sel_index = \
            int(sys.argv[base_arg_count]) \
            if len(sys.argv) > base_arg_count \
            else input("Select a predefined build-configuration to run: ")

      # valid indices are 0 ... len-1; rejecting len itself avoids an
      # IndexError below (the previous check wrongly accepted sel_index == len)
      if not isinstance(sel_index, int) or sel_index < 0 or sel_index >= len(bcfg.configs):
            utils.cli_exit("ERROR: Must enter a valid test index in the range [0 ... " + str(len(bcfg.configs) - 1) + "]")

      selected_build_cfg = bcfg.configs[sel_index]

      print ("Building: " + selected_build_cfg.get("name"))
      print # newline

      build_params = selected_build_cfg.get("params")

      # use build-steps from sys.argv or alternatively ask the user
      build_steps_argv = \
            sys.argv[base_arg_count + 1:] \
            if len(sys.argv) > base_arg_count + 1 \
            else shlex.split(raw_input("Override build-steps ? (leave empty to run pre-configured steps): "))

      # create a parser that only expects the build-step args
      parser = cli.get_blank_parser()
      cli.init_build_steps(parser)

      # parse the build-step syntax
      user_params = parser.parse_args(build_steps_argv)

      # convert into dictionary form
      user_params = vars(user_params)

      # merge the potentially customized build-steps into the
      # original pre-defined build-config params
      # see: https://stackoverflow.com/a/15277395/425532
      build_params.update((k,v) for k,v in user_params.iteritems() if v is not None)

      # start the build
      bex.execute_build(build_params)
Example #10
0
def execute_build(params):
    """
    Receives an params-object with all the necessary build-settings to start
    building the J2V8 artifacts. There are two paths internally that this function will take:

    A) Run the build in the same OS shell environment that the build.py command was started from.
    This means you have to make sure all the necessary build utensils are installed on your system.
    To find out what is needed to build on a particular platform you can have a look in the "docker"
    and "vagrant" directories, they contain shell scripts that show how to install all the things
    you need if you would want to set up a build environment manually on your machine.

    B) Use virtualization technologies to run a sandboxed build-environment that does not rely
    on your machine having installed any of the required build-tools natively. This also allows
    to cross-compile mostly all supported platforms independently of the host operating system that
    you are running on your machine (only Docker and/or Vagrant are required to run this).
    """
    # convert from a dictionary form to the normalized params-object form
    if (isinstance(params, dict)):
        params = cli.BuildParams(params)

    # can be used to force output of all started sub-processes through the host-process stdout
    utils.redirect_stdout_enabled = hasattr(
        params, "redirect_stdout") and params.redirect_stdout

    if (params.target is None):
        utils.cli_exit("ERROR: No target platform specified")

    if (params.docker and params.vagrant):
        utils.cli_exit(
            "ERROR: Choose either Docker or Vagrant for the build, can not use both"
        )

    target = params.target

    if (not target in bc.platform_configs):
        utils.cli_exit("ERROR: Unrecognized target platform: " + target)

    # this defines the PlatformConfig / operating system the build should be run for
    target_platform = bc.platform_configs.get(target)

    if (params.arch is None):
        utils.cli_exit("ERROR: No target architecture specified")

    avail_architectures = target_platform.architectures

    if (not params.arch in avail_architectures):
        utils.cli_exit("ERROR: Unsupported architecture: \"" + params.arch +
                       "\" for selected target platform: " + target)

    if (params.buildsteps is None):
        utils.cli_exit("ERROR: No build-step specified, valid values are: " +
                       ", ".join(bc.avail_build_steps))

    # normalize a single build-step value into a one-element list
    if (not params.buildsteps is None
            and not isinstance(params.buildsteps, list)):
        params.buildsteps = [params.buildsteps]

    # reset any build-step state left over from a previous invocation (shared on BuildState)
    parsed_steps = BuildState.parsed_steps
    parsed_steps.clear()

    # first look for the advanced form of build-step where it might be specified with some arguments to be passed
    # to the underlying build-tool (e.g. --j2v8test="-Dtest=NodeJSTest")
    for step in bc.atomic_build_step_sequence:
        step_args = getattr(params, step, None)

        if step_args:
            parsed_steps.add(step)

    # if there were no special build-step args or atomic build-step args passed
    # then fall back to the default behavior and run all known steps
    if not any(parsed_steps) and not any(params.buildsteps):
        params.buildsteps = ["all"]

    # then go through the raw list of basic build-steps (given by the CLI or an API call)
    # and generate a list of only the atomic build-steps that were derived in the evaluation
    for step in params.buildsteps:
        evaluate_build_step_option(step)

    # force build-steps into their pre-defined order (see: http://stackoverflow.com/a/23529016)
    parsed_steps = [
        step for step in bc.atomic_build_step_sequence if step in parsed_steps
    ]

    if (len(parsed_steps) == 0):
        utils.cli_exit("WARNING: No build-steps to be done ... exiting")

    build_cwd = utils.get_cwd()

    cross_cfg = None
    cross_configs = target_platform.cross_configs

    # determine which virtualization backend (if any) was requested to host the build
    cross_sys = "docker" if params.docker else "vagrant" if params.vagrant else None

    # if a recognized cross-compile option was specified by the params
    # try to find the configuration parameters to run the cross-compiler
    if (cross_sys):
        if (cross_configs.get(cross_sys) is None):
            utils.cli_exit(
                "ERROR: target '" + target +
                "' does not have a recognized cross-compile host: '" +
                cross_sys + "'")
        else:
            cross_cfg = cross_configs.get(cross_sys)

    # if we are the build-instigator (not a cross-compile build-agent) we directly run some initial checks & setups for the build
    if (not params.cross_agent):
        print "Checking Node.js builtins integration consistency..."
        utils.check_node_builtins()

        v8_major, v8_minor, v8_build, v8_patch, v8_is_candidate = utils.get_v8_version(
        )
        njs_major, njs_minor, njs_patch, njs_is_release = utils.get_nodejs_version(
        )

        print "--------------------------------------------------"
        print "V8:      %(v8_major)s.%(v8_minor)s.%(v8_build)s.%(v8_patch)s (candidate: %(v8_is_candidate)s)" % locals(
        )
        print "Node.js: %(njs_major)s.%(njs_minor)s.%(njs_patch)s (release: %(njs_is_release)s)" % locals(
        )
        print "--------------------------------------------------"

        print "Caching Node.js artifacts..."
        # cache tag layout: [vendor-]target.arch
        curr_node_tag = (params.vendor + "-"
                         if params.vendor else "") + target + "." + params.arch
        utils.store_nodejs_output(curr_node_tag, build_cwd)

    def execute_build_step(build_system, build_step):
        """Creates an immutable copy of a single BuildStep configuration and executes it in the build-system"""
        # from this point on, make the build-input immutable to ensure consistency across the whole build process
        # any actions during the build-step should only be made based on the initial set of variables & conditions
        # NOTE: this restriction makes it much more easy to reason about the build-process as a whole (see "unidirectional data flow")
        build_step = immutable.freeze(build_step)
        build_system.build(build_step)

    # a cross-compile was requested, we just launch the virtualization-environment and then delegate
    # the originally requested build parameters to the cross-compile environment then running the build.py CLI
    if (cross_cfg):
        cross_compiler = target_platform.cross_compiler(cross_sys)

        parsed_step_args = ""

        # look for build-step arguments that were passed in by the user
        # e.g. --j2v8test="-Dtest=..." and pass them down to the cross-agent also
        for step in bc.atomic_build_step_sequence:
            step_args = getattr(params, step, None)

            if step_args:
                parsed_step_args += " --" + step + "='" + step_args + "'"

        # invoke the build.py CLI within the virtualized / self-contained build-system provider
        cross_cfg.custom_cmd = "python ./build.py " + \
            "--cross-agent " + cross_sys + \
            " -t $PLATFORM -a $ARCH " + \
            (" -ne" if params.node_enabled else "") + \
            (" -v " + params.vendor if params.vendor else "") + \
            (" -knl " if params.keep_native_libs else "") + \
            " " + " ".join(parsed_steps) + parsed_step_args

        # apply meta-vars & util functions
        cross_cfg.compiler = cross_compiler
        cross_cfg.inject_env = lambda s: cross_compiler.inject_env(
            s, cross_cfg)
        cross_cfg.target = target_platform

        # apply essential build params
        cross_cfg.arch = params.arch
        cross_cfg.file_abi = target_platform.file_abi(params.arch)
        cross_cfg.no_shutdown = params.no_shutdown
        cross_cfg.sys_image = params.sys_image
        cross_cfg.vendor = params.vendor
        cross_cfg.docker = params.docker
        cross_cfg.vagrant = params.vagrant

        # start the cross-compile
        execute_build_step(cross_compiler, cross_cfg)

    # run the requested build-steps & parameters in the current shell environment
    else:
        target_compiler = ShellBuildSystem()
        build_steps = dict(target_platform.steps)

        # this is a build-agent for a cross-compile
        if (params.cross_agent):
            # the cross-compile step dictates which directory will be used to run the actual build
            cross_cfg = cross_configs.get(params.cross_agent)

            if (cross_cfg is None):
                utils.cli_exit(
                    "ERROR: internal error while looking for cross-compiler config: "
                    + params.cross_agent)

            build_cwd = cross_cfg.build_cwd

        # execute all steps from a list that parsed / evaluated before (see the "build-step parsing" section above)
        for step in parsed_steps:
            if (not step in build_steps):
                print("WARNING: skipping build step \"" + step +
                      "\" (not configured and/or supported for platform \"" +
                      params.target + "\")")
                continue

            target_step = build_steps[step]

            # apply meta-vars & util functions
            target_step.cross_agent = params.cross_agent
            target_step.compiler = target_compiler
            # NOTE(review): this lambda late-binds 'step'; it appears safe only because the
            # step is executed by execute_build_step() before the loop advances — confirm
            target_step.inject_env = lambda s: target_compiler.inject_env(
                s, build_steps[step])
            target_step.target = target_platform

            # apply essential build params
            target_step.arch = params.arch
            target_step.file_abi = target_platform.file_abi(params.arch)
            target_step.node_enabled = params.node_enabled
            target_step.build_cwd = build_cwd
            target_step.vendor = params.vendor
            target_step.docker = params.docker
            target_step.vagrant = params.vagrant
            target_step.keep_native_libs = params.keep_native_libs
            target_step.args = getattr(params, step, None)

            # run the current BuildStep
            execute_build_step(target_compiler, target_step)
Example #11
0
def raise_unhandled_option(step):
    """Return a zero-argument callable that aborts the CLI for the unrecognized build-step."""
    def _abort():
        return utils.cli_exit(
            "INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step +
            "\"")
    return _abort
Example #12
0
 def health_check(self, config):
     """Verify that the Vagrant tooling is installed and responding on the host."""
     print "Verifying Vagrant build-system status..."
     try:
         # a non-zero exit code of the version probe means Vagrant is unusable
         self.exec_host_cmd("vagrant --version", config)
     except subprocess.CalledProcessError:
         utils.cli_exit("ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!")
Example #13
0
def raise_unhandled_option(step):
    """Return a zero-argument callable that aborts the CLI for the unrecognized build-step."""
    return lambda: utils.cli_exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"")
Example #14
0
def execute_build(params):
    """
    Receives an params-object with all the necessary build-settings to start
    building the J2V8 artifacts. There are two paths internally that this function will take:

    A) Run the build in the same OS shell environment that the build.py command was started from.
    This means you have to make sure all the necessary build utensils are installed on your system.
    To find out what is needed to build on a particular platform you can have a look in the "docker"
    and "vagrant" directories, they contain shell scripts that show how to install all the things
    you need if you would want to set up a build environment manually on your machine.

    B) Use virtualization technologies to run a sandboxed build-environment that does not rely
    on your machine having installed any of the required build-tools natively. This also allows
    to cross-compile mostly all supported platforms independently of the host operating system that
    you are running on your machine (only Docker and/or Vagrant are required to run this).
    """
    # convert from a dictionary form to the normalized params-object form
    if (isinstance(params, dict)):
        params = cli.BuildParams(params)

    # can be used to force output of all started sub-processes through the host-process stdout
    utils.redirect_stdout_enabled = hasattr(params, "redirect_stdout") and params.redirect_stdout

    if (params.target is None):
        utils.cli_exit("ERROR: No target platform specified")

    if (params.docker and params.vagrant):
        utils.cli_exit("ERROR: Choose either Docker or Vagrant for the build, can not use both")

    target = params.target

    if (not target in bc.platform_configs):
        utils.cli_exit("ERROR: Unrecognized target platform: " + target)

    # this defines the PlatformConfig / operating system the build should be run for
    target_platform = bc.platform_configs.get(target)

    if (params.arch is None):
        utils.cli_exit("ERROR: No target architecture specified")

    avail_architectures = target_platform.architectures

    if (not params.arch in avail_architectures):
        utils.cli_exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target)

    if (params.buildsteps is None):
        utils.cli_exit("ERROR: No build-step specified, valid values are: " + ", ".join(bc.avail_build_steps))

    # normalize a single build-step value into a one-element list
    if (not params.buildsteps is None and not isinstance(params.buildsteps, list)):
        params.buildsteps = [params.buildsteps]

    # reset any build-step state left over from a previous invocation (shared on BuildState)
    parsed_steps = BuildState.parsed_steps
    parsed_steps.clear()

    # first look for the advanced form of build-step where it might be specified with some arguments to be passed
    # to the underlying build-tool (e.g. --j2v8test="-Dtest=NodeJSTest")
    for step in bc.atomic_build_step_sequence:
        step_args = getattr(params, step, None)

        if step_args:
            parsed_steps.add(step)

    # if there were no special build-step args or atomic build-step args passed
    # then fall back to the default behavior and run all known steps
    if not any(parsed_steps) and not any(params.buildsteps):
        params.buildsteps = ["all"]

    # then go through the raw list of basic build-steps (given by the CLI or an API call)
    # and generate a list of only the atomic build-steps that were derived in the evaluation
    for step in params.buildsteps:
        evaluate_build_step_option(step)

    # force build-steps into their pre-defined order (see: http://stackoverflow.com/a/23529016)
    parsed_steps = [step for step in bc.atomic_build_step_sequence if step in parsed_steps]

    if (len(parsed_steps) == 0):
        utils.cli_exit("WARNING: No build-steps to be done ... exiting")

    build_cwd = utils.get_cwd()

    cross_cfg = None
    cross_configs = target_platform.cross_configs

    # determine which virtualization backend (if any) was requested to host the build
    cross_sys = "docker" if params.docker else "vagrant" if params.vagrant else None

    # if a recognized cross-compile option was specified by the params
    # try to find the configuration parameters to run the cross-compiler
    if (cross_sys):
        if (cross_configs.get(cross_sys) is None):
            utils.cli_exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_sys + "'")
        else:
            cross_cfg = cross_configs.get(cross_sys)

    # if we are the build-instigator (not a cross-compile build-agent) we directly run some initial checks & setups for the build
    if (not params.cross_agent):
        print "Checking Node.js builtins integration consistency..."
        utils.check_node_builtins()

        v8_major,v8_minor,v8_build,v8_patch,v8_is_candidate = utils.get_v8_version()
        njs_major,njs_minor,njs_patch,njs_is_release = utils.get_nodejs_version()

        print "--------------------------------------------------"
        print "V8:      %(v8_major)s.%(v8_minor)s.%(v8_build)s.%(v8_patch)s (candidate: %(v8_is_candidate)s)" % locals()
        print "Node.js: %(njs_major)s.%(njs_minor)s.%(njs_patch)s (release: %(njs_is_release)s)" % locals()
        print "--------------------------------------------------"

        print "Caching Node.js artifacts..."
        # cache tag layout: [vendor-]target.arch
        curr_node_tag = (params.vendor + "-" if params.vendor else "") + target + "." + params.arch
        utils.store_nodejs_output(curr_node_tag, build_cwd)

    def execute_build_step(build_system, build_step):
        """Creates an immutable copy of a single BuildStep configuration and executes it in the build-system"""
        # from this point on, make the build-input immutable to ensure consistency across the whole build process
        # any actions during the build-step should only be made based on the initial set of variables & conditions
        # NOTE: this restriction makes it much more easy to reason about the build-process as a whole (see "unidirectional data flow")
        build_step = immutable.freeze(build_step)
        build_system.build(build_step)

    # a cross-compile was requested, we just launch the virtualization-environment and then delegate
    # the originally requested build parameters to the cross-compile environment then running the build.py CLI
    if (cross_cfg):
        cross_compiler = target_platform.cross_compiler(cross_sys)

        parsed_step_args = ""

        # look for build-step arguments that were passed in by the user
        # e.g. --j2v8test="-Dtest=..." and pass them down to the cross-agent also
        for step in bc.atomic_build_step_sequence:
            step_args = getattr(params, step, None)

            if step_args:
                parsed_step_args += " --" + step + "='" + step_args + "'"

        # invoke the build.py CLI within the virtualized / self-contained build-system provider
        cross_cfg.custom_cmd = "python ./build.py " + \
            "--cross-agent " + cross_sys + \
            " -t $PLATFORM -a $ARCH " + \
            (" -ne" if params.node_enabled else "") + \
            (" -v " + params.vendor if params.vendor else "") + \
            (" -knl " if params.keep_native_libs else "") + \
            " " + " ".join(parsed_steps) + parsed_step_args

        # apply meta-vars & util functions
        cross_cfg.compiler = cross_compiler
        cross_cfg.inject_env = lambda s: cross_compiler.inject_env(s, cross_cfg)
        cross_cfg.target = target_platform

        # apply essential build params
        cross_cfg.arch = params.arch
        cross_cfg.file_abi = target_platform.file_abi(params.arch)
        cross_cfg.no_shutdown = params.no_shutdown
        cross_cfg.sys_image = params.sys_image
        cross_cfg.vendor = params.vendor
        cross_cfg.docker = params.docker
        cross_cfg.vagrant = params.vagrant

        # start the cross-compile
        execute_build_step(cross_compiler, cross_cfg)

    # run the requested build-steps & parameters in the current shell environment
    else:
        target_compiler = ShellBuildSystem()
        build_steps = dict(target_platform.steps)

        # this is a build-agent for a cross-compile
        if (params.cross_agent):
            # the cross-compile step dictates which directory will be used to run the actual build
            cross_cfg = cross_configs.get(params.cross_agent)

            if (cross_cfg is None):
                utils.cli_exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent)

            build_cwd = cross_cfg.build_cwd

        # execute all steps from a list that parsed / evaluated before (see the "build-step parsing" section above)
        for step in parsed_steps:
            if (not step in build_steps):
                print("WARNING: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")")
                continue

            target_step = build_steps[step]

            # apply meta-vars & util functions
            target_step.cross_agent = params.cross_agent
            target_step.compiler = target_compiler
            # NOTE(review): this lambda late-binds 'step'; it appears safe only because the
            # step is executed by execute_build_step() before the loop advances — confirm
            target_step.inject_env = lambda s: target_compiler.inject_env(s, build_steps[step])
            target_step.target = target_platform

            # apply essential build params
            target_step.arch = params.arch
            target_step.file_abi = target_platform.file_abi(params.arch)
            target_step.node_enabled = params.node_enabled
            target_step.build_cwd = build_cwd
            target_step.vendor = params.vendor
            target_step.docker = params.docker
            target_step.vagrant = params.vagrant
            target_step.keep_native_libs = params.keep_native_libs
            target_step.args = getattr(params, step, None)

            # run the current BuildStep
            execute_build_step(target_compiler, target_step)
Example #15
0
 def health_check(self, config):
     """Probe the host shell ("ver" on win32, "bash --version" elsewhere) to verify it is usable."""
     try:
         shell_check_cmd = "ver" if utils.is_win32(config.platform) else "bash --version"
         self.exec_cmd(shell_check_cmd, config)
     except subprocess.CalledProcessError:
         utils.cli_exit("ERROR: Failed Shell build-system health check!")