def build():
    """Compile Hadoop's native HDFS bits with a platform-specific Maven profile."""
    # Make maven-enforcer-plugin happy: rewrite the pinned 1.7 Java
    # version in pom.xml to whatever JDK is actually installed.
    detected = detect_java_version()
    version_patches = [
        ("<javaVersion>1.7", "<javaVersion>" + detected),
        ("<targetJavaVersion>1.7", "<targetJavaVersion>" + detected),
    ]
    for needle, replacement in version_patches:
        sed_inplace("pom.xml", needle, replacement)

    # Disable hadoop-annotations and pull them from Maven Central. This
    # module seems to require tools.jar, but even if it is in the right
    # place, the compilation still fails with cryptic error messages.
    sed_inplace(os.path.join("hadoop-common-project", "pom.xml"),
                "<module>hadoop-annotations</module>", "")

    if sys.platform not in ["cygwin", "win32"]:
        target = "native"
    else:
        target = "native-win"

        # Upgrade the checked-in Visual Studio solutions to the
        # installed toolchain before building.
        solutions = [
            "hadoop-common-project\\hadoop-common\\src\\main\\native\\native.sln",
            "hadoop-common-project\\hadoop-common\\src\\main\\winutils\\winutils.sln",
        ]
        for solution in solutions:
            run("devenv /upgrade " + solution)

        sed_inplace("hadoop-hdfs-project\\hadoop-hdfs\\pom.xml",
                    "Visual Studio 10", "Visual Studio 14")

        # Append the runtime-library override snippet (RUNTIME_LIB_OVERRIDE)
        # to the HDFS CMake configuration.
        cmake_lists = "hadoop-hdfs-project\\hadoop-hdfs\\src\\CMakeLists.txt"
        with open(cmake_lists, "a") as appender:
            appender.write(RUNTIME_LIB_OVERRIDE)

    run("mvn -q install -pl :hadoop-maven-plugins -am")
    run("mvn -q compile -P{} -pl :hadoop-hdfs -am".format(target),
        env=dict(os.environ, CFLAGS="-fPIC"))
def install_dependencies():
    """Fetch/build protoc 2.5.0 for the current platform.

    Leaves the path to the resulting ``protoc`` binary in the
    ``HADOOP_PROTOC_CDH5_PATH`` environment variable; on macOS it also
    symlinks tools.jar into the location maven-antrun-plugin expects
    and exports ``JAVA_HOME``.
    """
    # startswith() also matches Python 2's "linux2" platform string.
    if sys.platform.startswith("linux"):
        # Build protoc 2.5.0 from source (the pinned version Hadoop needs).
        protobuf_archive, _headers = urlretrieve(
            "https://github.com/google/protobuf/releases/download/"
            "v2.5.0/protobuf-2.5.0.zip")
        with zipfile.ZipFile(protobuf_archive, "r") as zf:
            zf.extractall()

        with cd("protobuf-2.5.0"):
            run("sh configure")
            run("make")

            os.environ["HADOOP_PROTOC_CDH5_PATH"] = \
                os.path.join(os.getcwd(), "src", "protoc")
    elif sys.platform == "darwin":
        # Use a with-block so the devnull handle used to silence brew
        # is closed instead of leaked.
        with open(os.devnull, "wb") as devnull:
            run("brew install [email protected]", stdout=devnull)
        os.environ["HADOOP_PROTOC_CDH5_PATH"] = \
            "/usr/local/opt/[email protected]/bin/protoc"

        # Make maven-antrun-plugin happy and put tools.jar to the
        # magical place.
        java_home = os.environ["JAVA_HOME"] = subprocess.check_output(
            "/usr/libexec/java_home").strip().decode()
        # Not pure Python because we need sudo.
        run("sudo mkdir -p " + os.path.join(java_home, "Classes"))
        run("sudo ln -nsf {} {}".format(
            os.path.join(java_home, "lib", "tools.jar"),
            os.path.join(java_home, "Classes", "classes.jar")))
    elif sys.platform in ["cygwin", "win32"]:
        # Windows gets a prebuilt protoc binary instead of a source build.
        protobuf_archive, _headers = urlretrieve(
            "https://github.com/google/protobuf/releases/download/"
            "v2.5.0/protoc-2.5.0-win32.zip")
        with zipfile.ZipFile(protobuf_archive, "r") as zf:
            zf.extractall()

        os.environ["HADOOP_PROTOC_CDH5_PATH"] = \
            os.path.join(os.getcwd(), "protoc.exe")
# Beispiel #3
# 0
# NOTE(review): the two lines above were bare scrape artifacts (an example
# separator and a stray literal); commented out so the module imports cleanly.
# Build switch: when False, the cmake flags "-DUSE_OPENMP=OFF -DDISABLE_OPENMP"
# are appended to the xgboost build below, disabling OpenMP support.
WITH_OPENMP = False

if __name__ == "__main__":
    os.chdir("xgboost")
    xgboost_dir = os.getcwd()

    openmp_build_flag = ""
    if not WITH_OPENMP:
        openmp_build_flag = " -DUSE_OPENMP=OFF -DDISABLE_OPENMP"

    # Compute DMLC xgboost version, i.e: 1.1.0
    xgboost_version = os.environ["XGBOOST_BASE_VERSION"]
    [dmlc_version] = re.findall(r"^(.*?)-", xgboost_version)

    os.chdir("jvm-packages")
    run("mvn -q -B versions:set -DnewVersion=" + xgboost_version)

    # versions:update-property only updates properties which define
    # artifact versions, therefore we have to resort to sed.
    scala_version = os.environ["SCALA_VERSION"]
    [scala_binary_version] = re.findall(r"^(2\.1[012])\.\d+", scala_version)
    sed_inplace("pom.xml",
                "<scala.binary.version>[^<]+",
                "<scala.binary.version>" + scala_binary_version,
                regex=True)
    sed_inplace("pom.xml",
                "<scala.version>[^<]+",
                "<scala.version>" + scala_version,
                regex=True)
    sed_inplace("pom.xml",
                "<spark.version>[^<]+",
from __future__ import print_function, unicode_literals

import os
import re
import shutil
import sys
from _internal import run, sed_inplace, maybe_makedirs


if __name__ == "__main__":
    os.chdir("xgboost")
    xgboost_dir = os.getcwd()

    os.chdir("jvm-packages")
    run("mvn -q -B versions:set -DnewVersion=" + os.environ["XGBOOST_VERSION"])

    # versions:update-property only updates properties which define
    # artifact versions, therefore we have to resort to sed.
    scala_version = os.environ["SCALA_VERSION"]
    [scala_binary_version] = re.findall(r"^(2\.1[012])\.\d+", scala_version)
    sed_inplace("pom.xml",
                "<scala.binary.version>[^<]+",
                "<scala.binary.version>" + scala_binary_version, regex=True)
    sed_inplace("pom.xml",
                "<scala.version>[^<]+",
                "<scala.version>" + scala_version, regex=True)
    sed_inplace("pom.xml",
                "<spark.version>[^<]+",
                "<spark.version>" + os.environ["SPARK_VERSION"], regex=True)