예제 #1
0
def cleanseFakeroot(fakeroot, installDirectory, repoDirectory):
  """Run the optional prepare_repo_for_packaging script inside a fakeroot.

  Invoked by prepFakerootFromGit and prepFakerootFromDirectory for each of
  their subdirectories to scrub a repo checkout before packaging.

  :param fakeroot: path to the directory to use as the root of the RPM's
    install tree
  :param installDirectory: joined with fakeroot to locate the tree to clean
  :param repoDirectory: joined with fakeroot and installDirectory to locate
    the tree to clean
  """

  # prepare_repo_for_packaging may not cope with being run from anywhere but
  # the root of the repo checkout, so cd there before invoking it.
  g_logger.debug("Cleaning fakeroot: %s", fakeroot)
  workpath = os.path.join(fakeroot, installDirectory, repoDirectory)
  if not os.path.isdir(workpath):
    return

  with changeToWorkingDir(workpath):
    cleanerScript = os.path.join(workpath, "prepare_repo_for_packaging")

    if not os.path.isfile(cleanerScript):
      g_logger.debug("Optional cleanup script %s not found, skipping",
                     cleanerScript)
      return

    if os.path.isfile("/tmp/noclean.rpms"):
      g_logger.info("Found /tmp/noclean.rpms, skipping cleanup script")
    else:
      g_logger.info("Found %s, executing", cleanerScript)
      runWithOutput(command=(cleanerScript, "--destroy-all-my-work"),
                    logger=g_logger)
예제 #2
0
def prepFakerootFromDirectory(fakeroot,
                              sourceDirectory,
                              installDirectory,
                              baseDirectory):
  """
  Populate a fakeroot by copying the top level of a source directory into it.

  :param fakeroot: path to the directory to use as the root of the RPM's
    install tree
  :param sourceDirectory: Directory to copy from
  :param installDirectory: Where to create the baseDirectory
  :param baseDirectory: Where to copy the files to create the fakeroot
  :raises: infrastructure.utilities.exceptions.MissingDirectoryError
    if the given sourceDirectory is not found.
  """

  g_logger.info("Prepping fakeroot in %s from %s", fakeroot, sourceDirectory)
  installPath = commonFakerootPrep(fakeroot, installDirectory)
  with changeToWorkingDir(installPath):
    if not os.path.isdir(sourceDirectory):
      g_logger.error("%s is not a directory!", sourceDirectory)
      raise MissingDirectoryError("Directory not found!")
    targetDirectory = "%s/%s/%s" % (fakeroot, installDirectory, baseDirectory)
    os.makedirs(targetDirectory)
    # Copy each top-level entry of the source directory into the fakeroot,
    # then run the per-directory cleanup hook on it.
    for entry in os.listdir(sourceDirectory):
      g_logger.info("Copying %s to %s...", entry, targetDirectory)
      rsyncCommand = "rsync --exclude '.*.un~' -av %s/%s %s" % (
          sourceDirectory, entry, targetDirectory)
      runWithOutput(rsyncCommand)
      cleanseFakeroot(fakeroot, installDirectory,
                      "%s/%s" % (baseDirectory, entry))
예제 #3
0
def runTests(env, logger):
  """
    Runs NuPIC tests.

    :param env: The environment which will be set for running tests.
    :param logger: An initialized logger.

    :raises: re-raises any exception coming out of the test commands after
      logging it (e.g. CommandFailedError if a test run fails).
  """
  logger.debug("Running NuPIC Tests.")
  with changeToWorkingDir(env["NUPIC"]):
    try:
      log.printEnv(env, logger)
      runWithOutput("bin/py_region_test", env, logger)
      testCommand = "scripts/run_nupic_tests -u --coverage --results xml"
      runWithOutput(testCommand, env, logger)
    except Exception:
      # Narrowed from a bare except so SystemExit/KeyboardInterrupt are not
      # logged as test failures; the exception is re-raised either way.
      logger.exception("NuPIC Tests have failed.")
      raise
    else:
      # Pick up the xunit results file the test run just produced and move
      # it into the shared results folder.
      resultFile = glob.glob("%s/tests/results/xunit/*/*.xml" % env["NUPIC"])[0]
      logger.debug("Copying results to results folder.")
      shutil.move(resultFile, createOrReplaceResultsDir())
      logger.info("NuPIC tests have passed")
예제 #4
0
def buildHtmIt(env, pipelineConfig, logger):
    """
    Builds HTM-IT with given HTM-IT SHA.

    :param env: The environment which will be set before building

    :param pipelineConfig: dict of the pipeline config values, e.g.:
      {
        "buildWorkspace": "/path/to/build/in",
        "htmitRemote": "[email protected]:Numenta/numenta-apps.git",
        "htmitBranch": "master",
        "htmItSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file"
      }

    :param logger: Logger object.

    :raises infrastructure.utilities.exceptions.BuildFailureException:
      This exception is raised if build fails.
    """
    try:
        sitePackagesDir = os.path.join(env["PRODUCTS"],
                                       "htm-it/lib/python2.7/site-packages")
        if not os.path.exists(sitePackagesDir):
            os.makedirs(sitePackagesDir)

        # Setup the baseline configuration
        with changeToWorkingDir(env["HTM_IT_HOME"]):
            runWithOutput("python setup.py configure_htm_it",
                          env=env,
                          logger=logger)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt are not
        # converted into a build failure.
        logger.exception("Unknown failure")
        raise BuildFailureException("HTM-IT building failed. Exiting.")
예제 #5
0
def fetchNuPICCoreFromS3(buildWorkspace, nupicCoreSHA, logger):
  """
    Downloads an archived nupic.core build from S3 and untars it into the
    local cache directory.

    :param buildWorkspace: The workspace where nupic.core will be built
    :param nupicCoreSHA: The SHA of the nupic.core build that needs to be
      fetched
    :param logger: An initialized logger.
  """
  logger.info("Downloading nupic.core from S3.")
  cachedDir = "/var/build/nupic.core/%s" % nupicCoreSHA
  with changeToWorkingDir(buildWorkspace):
    archivePath = s3.downloadFileFromS3(
        "builds.numenta.com",
        "builds_nupic_core/nupic.core-%s.zip" % nupicCoreSHA,
        logger)

    logger.info("Untarring %s", archivePath)
    untarCommand = "tar xzvf %s -C %s" % (archivePath, cachedDir)
    try:
      os.makedirs(cachedDir)
      runWithOutput(untarCommand, logger=logger)
    except OSError:
      # makedirs failing is taken to mean a cached copy already exists
      logger.exception("Cached nupic.core already exists at %s", cachedDir)
      raise
    except CommandFailedError:
      logger.exception("Failed while untarring cached nupic.core: %s",
                       untarCommand)
      raise
    else:
      logger.info("nupic.core downloaded from S3 & stored at %s", cachedDir)
예제 #6
0
def main(jsonArgs=None):
  """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

    :returns: 0 if all unit tests passed, 1 otherwise.
    :raises: re-raises any unexpected exception after logging it.
  """
  jsonArgs = jsonArgs or {}
  testResult = False
  try:
    (buildWorkspace, pipelineParams, pipelineJson) = addAndParseArgs(jsonArgs)

    os.environ["BUILD_WORKSPACE"] = buildWorkspace
    env = prepareEnv(buildWorkspace, None, os.environ)

    testResult = runUnitTests(env=env)
    # Write testResult to JSON file if JSON file driven run
    if pipelineJson:
      pipelineParams["test"] = {"testStatus" : testResult}
      with open(pipelineJson, 'w') as fp:
        fp.write(json.dumps(pipelineParams, ensure_ascii=False))
      runWithOutput("cat %s" % pipelineJson)
    # In any case log success/failure to console and exit accordingly
    exitStatus = int(not testResult)
    if exitStatus:
      g_logger.error("Test Failure!!!")
    else:
      g_logger.debug("All tests passed")
    return exitStatus
  except Exception:
    # Narrowed from a bare except so SystemExit (e.g. argparse --help inside
    # addAndParseArgs) and KeyboardInterrupt are not logged as unknown errors.
    g_logger.exception("Unknown error occurred while running unit tests")
    raise
예제 #7
0
def uploadToSauceLab(apkPath, apkName, uploadName, logger):
  """
    Uploads the APK to saucelab

    :param apkPath: Path to the APK
    :param apkName: Name of the APK in the artifacts directory
    :param uploadName: Name of the apk to upload to saucelab
    :param logger: An initialized logger.

    :raises: CommandFailedError when the `curl` command fails.
  """

  # Credentials come from the environment; a missing variable raises KeyError.
  sauceUser = os.environ["SAUCE_USER_NAME"]
  sauceKey = os.environ["SAUCE_KEY"]
  storageUrl = "https://saucelabs.com/rest/v1/storage"
  uploadCommand = ("curl -u %s:%s -X POST"
                   " %s/%s/%s?overwrite=true"
                   " -H Content-Type:application/octet-stream"
                   " --data-binary @%s" %
                   (sauceUser, sauceKey, storageUrl, sauceUser, uploadName,
                    apkName))

  with changeToWorkingDir(apkPath):
    try:
      logger.info("---------------- Uploading to saucelabs ----------------")
      runWithOutput(uploadCommand, logger=logger)
    except CommandFailedError:
      logger.exception("Failed to upload APK to saucelab.")
      raise
예제 #8
0
def fetchNuPICCoreFromS3(buildWorkspace, nupicCoreSHA, logger):
    """
    Downloads an archived nupic.core build from S3 and untars it into the
    local cache directory.

    :param buildWorkspace: The workspace where nupic.core will be built
    :param nupicCoreSHA: The SHA of the nupic.core build that needs to be
      fetched
    :param logger: An initialized logger.
    """
    logger.info("Downloading nupic.core from S3.")
    cachedDir = "/var/build/nupic.core/%s" % nupicCoreSHA
    with changeToWorkingDir(buildWorkspace):
        archivePath = s3.downloadFileFromS3(
            "builds.numenta.com",
            "builds_nupic_core/nupic.core-%s.zip" % nupicCoreSHA,
            logger)

        logger.info("Untarring %s", archivePath)
        untarCommand = "tar xzvf %s -C %s" % (archivePath, cachedDir)
        try:
            os.makedirs(cachedDir)
            runWithOutput(untarCommand, logger=logger)
        except OSError:
            # makedirs failing is taken to mean a cached copy already exists
            logger.exception("Cached nupic.core already exists at %s",
                             cachedDir)
            raise
        except CommandFailedError:
            logger.exception("Failed while untarring cached nupic.core: %s",
                             untarCommand)
            raise
        else:
            logger.info("nupic.core downloaded from S3 & stored at %s",
                        cachedDir)
예제 #9
0
def buildGrok(env, pipelineConfig, logger):
    """
    Builds Grok with given Grok SHA.

    :param env: The environment which will be set before building

    :param pipelineConfig: dict of the pipeline config values, e.g.:
      {
        "buildWorkspace": "/path/to/build/in",
        "grokRemote": "[email protected]:Numenta/numenta-apps.git",
        "grokBranch": "master",
        "grokSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file"
      }

    :param logger: Logger object.

    :raises infrastructure.utilities.exceptions.BuildFailureException:
      This exception is raised if build fails.
    """
    try:
        sitePackagesDir = os.path.join(env["PRODUCTS"],
                                       "grok/lib/python2.7/site-packages")
        if not os.path.exists(sitePackagesDir):
            os.makedirs(sitePackagesDir)

        # Setup the baseline configuration
        with changeToWorkingDir(env["GROK_HOME"]):
            runWithOutput("python setup.py configure_grok",
                          env=env,
                          logger=logger)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt are not
        # converted into a build failure.
        logger.exception("Unknown failure")
        raise BuildFailureException("Grok building failed. Exiting.")
예제 #10
0
def runTests(env, logger):
    """
    Runs NuPIC tests.

    :param env: The environment which will be set for running tests.
    :param logger: An initialized logger.

    :raises: re-raises any exception coming out of the test commands after
      logging it (e.g. CommandFailedError if a test run fails).
    """
    logger.debug("Running NuPIC Tests.")
    with changeToWorkingDir(env["NUPIC"]):
        try:
            log.printEnv(env, logger)
            runWithOutput("bin/py_region_test", env, logger)
            testCommand = "scripts/run_nupic_tests -u --coverage --results xml"
            runWithOutput(testCommand, env, logger)
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt are
            # not logged as test failures; the exception is re-raised anyway.
            logger.exception("NuPIC Tests have failed.")
            raise
        else:
            # Pick up the xunit results file the run just produced and move
            # it into the shared results folder.
            resultFile = glob.glob("%s/tests/results/xunit/*/*.xml" %
                                   env["NUPIC"])[0]
            logger.debug("Copying results to results folder.")
            shutil.move(resultFile, createOrReplaceResultsDir())
            logger.info("NuPIC tests have passed")
예제 #11
0
def runUnitTests(env, buildWorkspace):
  """
    Calls `grok/run_tests.sh` to run the unit tests

    :param dict env: Current environ set for GROK_HOME, etc
    :param str buildWorkspace: /path/to/buildWorkspace

    :returns: return True if tests are successful
    :rtype: bool

    :raises CommandFailedError: if the test script exits non-zero.
  """
  rawResultsFile = os.path.join(buildWorkspace, "numenta-apps", "grok", "tests",
                                "results", "py2", "xunit", "jenkins",
                                "results.xml")
  finalResultsFile = os.path.join(prepareResultsDir(),
                                  "unit_tests_%s_results.xml" %
                                    getBuildNumber(logger=g_logger))


  with changeToWorkingDir(os.path.join(buildWorkspace, "numenta-apps", "grok")):
    try:
      runWithOutput(command=("./run_tests.sh --unit --language py --results "
                             "jenkins"),
                    env=env,
                    logger=g_logger)
    except CommandFailedError:
      g_logger.exception("Failed to run unit tests")
      raise
    finally:
      # Only move the results file if the run actually produced one;
      # unconditionally calling shutil.move here raised IOError when the
      # script died early, masking the original CommandFailedError.
      if os.path.isfile(rawResultsFile):
        shutil.move(rawResultsFile, finalResultsFile)

  return analyzeResults(resultsPath=finalResultsFile)
예제 #12
0
def installNuPICWheel(env, installDir, wheelFilePath, logger):
    """
    Install a NuPIC Wheel to a specified location.

    :param env: The environment dict
    :param installDir: The root folder to install to. NOTE: pip will
      automatically create lib/pythonX.Y/site-packages and bin folders to
      install libraries and executables to. Make sure both of those sub-folder
      locations are already on your PYTHONPATH and PATH respectively.
    :param wheelFilePath: location of the NuPIC wheel that will be installed
    :param logger: initialized logger object

    :raises PipelineError: if no install directory is provided
    :raises CommandFailedError: if the pip install itself fails
    """
    # Validate outside the try block: previously this PipelineError was
    # caught by the bare except below and re-raised as a generic
    # CommandFailedError, hiding the real cause.
    if installDir is None:
        raise PipelineError("Please provide NuPIC install directory.")
    try:
        logger.debug("Installing %s to %s", wheelFilePath, installDir)
        pipCommand = ("pip install %s --install-option=--prefix=%s" %
                      (wheelFilePath, installDir))
        runWithOutput(pipCommand, env=env, logger=logger)
    except Exception:
        logger.exception("Failed to install NuPIC wheel")
        raise CommandFailedError("Installing NuPIC wheel failed.")
    else:
        logger.debug("NuPIC wheel installed successfully.")
def runUnitTests(env, buildWorkspace):
  """
    Calls `htm-it/run_tests.sh` to run the unit tests

    :param dict env: Current environ set for HTM_IT_HOME, etc
    :param str buildWorkspace: /path/to/buildWorkspace

    :returns: return True if tests are successful
    :rtype: bool

    :raises CommandFailedError: if the test script exits non-zero.
  """
  rawResultsFile = os.path.join(buildWorkspace, "numenta-apps", "htm.it",
                                "tests", "results", "py2", "xunit", "jenkins",
                                "results.xml")
  finalResultsFile = os.path.join(prepareResultsDir(),
                                  "unit_tests_%s_results.xml" %
                                    getBuildNumber(logger=g_logger))


  with changeToWorkingDir(os.path.join(buildWorkspace, "numenta-apps",
                                       "htm.it")):
    try:
      runWithOutput(command=("./run_tests.sh --unit --language py --results "
                             "jenkins"),
                    env=env,
                    logger=g_logger)
    except CommandFailedError:
      g_logger.exception("Failed to run unit tests")
      raise
    finally:
      # Only move the results file if the run actually produced one;
      # unconditionally calling shutil.move here raised IOError when the
      # script died early, masking the original CommandFailedError.
      if os.path.isfile(rawResultsFile):
        shutil.move(rawResultsFile, finalResultsFile)

  return analyzeResults(resultsPath=finalResultsFile)
예제 #14
0
def cleanseFakeroot(fakeroot, installDirectory, repoDirectory):
  """Clean up a fakeroot by running prepare_repo_for_packaging if present in
  the repoDirectory. Will be called by prepFakerootFromGit and
  prepFakerootFromDirectory for each of their subdirectories.

  :param fakeroot: path to the directory to use as the root of the RPM's
    install tree
  :param installDirectory: Used with fakeroot to construct the path to the
    tree to clean up.
  :param repoDirectory: Used with fakeroot and installDirectory to construct
    the path to the tree to clean up.
  """

  # Don't count on prepare_repo_for_packaging coping well if it is not run
  # from the root of the repo checkout, so store the pwd and cd into the repo
  # checkout before running cleaner or removing .git
  # (the previous ".YOMP" here looked like a botched automated rename of
  # ".git" — restored to the intelligible form)
  g_logger.debug("Cleaning fakeroot: %s", fakeroot)
  workpath = "%s/%s/%s" % (fakeroot, installDirectory, repoDirectory)
  if os.path.isdir(workpath):
    with changeToWorkingDir(workpath):
      cleanerScript = "%s/%s/%s/prepare_repo_for_packaging" % (fakeroot,
                                                               installDirectory,
                                                               repoDirectory)
      if os.path.isfile(cleanerScript):
        # /tmp/noclean.rpms acts as a manual override to skip cleanup
        if os.path.isfile("/tmp/noclean.rpms"):
          g_logger.info("Found /tmp/noclean.rpms, skipping cleanup script")
        else:
          g_logger.info("Found %s, executing", cleanerScript)
          runWithOutput("%s --destroy-all-my-work" % cleanerScript)
      else:
        g_logger.debug("Optional cleanup script %s not found, skipping",
                       cleanerScript)
예제 #15
0
def installNuPICWheel(env, installDir, wheelFilePath, logger):
  """
  Install a NuPIC Wheel to a specified location.

  :param env: The environment dict
  :param installDir: The root folder to install to. NOTE: pip will automatically
    create lib/pythonX.Y/site-packages and bin folders to install libraries
    and executables to. Make sure both of those sub-folder locations are already
    on your PYTHONPATH and PATH respectively.
  :param wheelFilePath: location of the NuPIC wheel that will be installed
  :param logger: initialized logger object

  :raises PipelineError: if no install directory is provided
  :raises CommandFailedError: if the pip install itself fails
  """
  # Validate outside the try block: previously this PipelineError was caught
  # by the bare except below and re-raised as a generic CommandFailedError,
  # hiding the real cause.
  if installDir is None:
    raise PipelineError("Please provide NuPIC install directory.")
  try:
    logger.debug("Installing %s to %s", wheelFilePath, installDir)
    pipCommand = ("pip install %s --install-option=--prefix=%s" %
                  (wheelFilePath, installDir))
    runWithOutput(pipCommand, env=env, logger=logger)
  except Exception:
    logger.exception("Failed to install NuPIC wheel")
    raise CommandFailedError("Installing NuPIC wheel failed.")
  else:
    logger.debug("NuPIC wheel installed successfully.")
예제 #16
0
def buildNuPICCore(env, nupicCoreSha, logger, buildWorkspace):
  """
    Builds nupic.core: checks out the requested SHA, builds capnp, installs
    the Python binding prerequisites into the build workspace, runs the
    cmake/make build, and finally builds/installs the Python distributions.

    :param dict env: The environment which will be set before building.
    :param str nupicCoreSha: The SHA which will be built.
    :param logger: An initialized logger
    :param str buildWorkspace: /path/to/buildWorkspace

    :raises infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
  with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
    try:
      logger.debug("Building nupic.core SHA : %s ", nupicCoreSha)
      git.resetHard(sha=nupicCoreSha, logger=logger)

      capnpTmp = buildCapnp(env, logger)

      # install pre-reqs into the build workspace for isolation
      runWithOutput(command=("pip install -r bindings/py/requirements.txt "
                             "--install-option=--prefix=%s "
                             "--ignore-installed" % buildWorkspace),
                            env=env, logger=logger)
      shutil.rmtree("build", ignore_errors=True)
      mkdirp("build/scripts")
      with changeToWorkingDir("build/scripts"):
        libdir = sysconfig.get_config_var('LIBDIR')
        runWithOutput(("cmake ../../src -DCMAKE_INSTALL_PREFIX=../release "
                       "-DCMAKE_PREFIX_PATH={} "
                       "-DPYTHON_LIBRARY={}/libpython2.7.so").format(
                           capnpTmp, libdir),
                      env=env, logger=logger)
        runWithOutput("make -j 4", env=env, logger=logger)
        runWithOutput("make install", env=env, logger=logger)

      # need to remove this folder to allow the caching process to work
      shutil.rmtree("external/linux32arm")

      # build the distributions with capnp's headers/libs on the search paths
      nupicBindingsEnv = env.copy()
      nupicBindingsEnv["CPPFLAGS"] = "-I{}".format(
          os.path.join(capnpTmp, "include"))
      nupicBindingsEnv["LDFLAGS"] = "-L{}".format(
          os.path.join(capnpTmp, "lib"))
      command = (
          "python setup.py install --prefix={} --nupic-core-dir={}".format(
              buildWorkspace, os.path.join(os.getcwd(), "build", "release")))
      # Building on jenkins, not local
      if "JENKINS_HOME" in os.environ:
        command += " bdist_wheel bdist_egg upload -r numenta-pypi"
      runWithOutput(command=command, env=nupicBindingsEnv, logger=logger)
    except Exception:
      # Narrowed from a bare except so SystemExit/KeyboardInterrupt are not
      # logged as build failures; the exception is re-raised either way.
      logger.exception("Failed to build nupic.core")
      raise
    else:
      logger.info("nupic.core building was successful.")
예제 #17
0
  def sanitizeSrvSalt(self, saltpath):
    """
    Ensure only whitelisted files & directories are installed to /srv/salt by
    the RPM.

    Numenta convention is to only include explicitly whitelisted formulas
    and files in RPMs deployed to customer machines.

    We add a PUBLIC file at the top level of a formula's directory tree
    to add it to the whitelist.

    This prevents us from accidentally publishing internal-only files to
    customer machines.

    :param saltpath: Path to /srv/salt in the fakeroot
    """

    logger = self.logger
    fileWhitelist = ["bootstrap.sh",
                     "top.sls"
                    ]

    logger.debug("Sanitizing %s", saltpath)
    for artifact in os.listdir(saltpath):
      artifactPath = "%s/%s" % (saltpath, artifact)
      if os.path.isfile(artifactPath):
        if artifact not in fileWhitelist:
          logger.debug("Purging %s", artifact)
          rmrf(artifactPath)
      if os.path.isdir(artifactPath):
        # Formula directories have to be explicitly whitelisted by having
        # a PUBLIC file or they will be purged from the salt tree.
        if not os.path.isfile("%s/PUBLIC" % artifactPath):
          logger.debug("Purging %s", artifact)
          rmrf(artifactPath)
        else:
          logger.info("packaging formula %s", artifact)

    # AWS requires that we don't include keys in marketplace AMIs.
    # Purge any pubkeys in the salt tree
    # Note that we _don't_ quote the wildcard here so that check_call
    # passes it to find correctly when it is called by runWithOutput.
    # Same for the {} and ;
    findPubkeys = """find %s -name *.pub -exec rm -fv {} ;""" % saltpath
    logger.debug("**************************************************")
    logger.debug("Sanitizing %s with %s", saltpath, findPubkeys)
    runWithOutput(findPubkeys, logger=logger)

    # Purge pemfiles
    findPemFiles = """find %s -name *.pem -exec rm -fv {} ;""" % saltpath
    logger.debug("**************************************************")
    # Bug fix: this previously logged findPubkeys while running findPemFiles
    logger.debug("Sanitizing %s with %s", saltpath, findPemFiles)
    runWithOutput(findPemFiles, logger=logger)
예제 #18
0
    def sanitizeSrvSalt(self, saltpath):
        """
        Ensure only whitelisted files & directories are installed to /srv/salt
        by the RPM.

        Numenta convention is to only include explicitly whitelisted formulas
        and files in RPMs deployed to customer machines.

        We add a PUBLIC file at the top level of a formula's directory tree
        to add it to the whitelist.

        This prevents us from accidentally publishing internal-only files to
        customer machines.

        :param saltpath: Path to /srv/salt in the fakeroot
        """

        logger = self.logger
        fileWhitelist = ["bootstrap.sh", "top.sls"]

        logger.debug("Sanitizing %s", saltpath)
        for artifact in os.listdir(saltpath):
            artifactPath = "%s/%s" % (saltpath, artifact)
            if os.path.isfile(artifactPath):
                if artifact not in fileWhitelist:
                    logger.debug("Purging %s", artifact)
                    rmrf(artifactPath)
            if os.path.isdir(artifactPath):
                # Formula directories have to be explicitly whitelisted by having
                # a PUBLIC file or they will be purged from the salt tree.
                if not os.path.isfile("%s/PUBLIC" % artifactPath):
                    logger.debug("Purging %s", artifact)
                    rmrf(artifactPath)
                else:
                    logger.info("packaging formula %s", artifact)

        # AWS requires that we don't include keys in marketplace AMIs.
        # Purge any pubkeys in the salt tree
        # Note that we _don't_ quote the wildcard here so that check_call
        # passes it to find correctly when it is called by runWithOutput.
        # Same for the {} and ;
        findPubkeys = """find %s -name *.pub -exec rm -fv {} ;""" % saltpath
        logger.debug("**************************************************")
        logger.debug("Sanitizing %s with %s", saltpath, findPubkeys)
        runWithOutput(findPubkeys, logger=logger)

        # Purge pemfiles
        findPemFiles = """find %s -name *.pem -exec rm -fv {} ;""" % saltpath
        logger.debug("**************************************************")
        # Bug fix: this previously logged findPubkeys while running findPemFiles
        logger.debug("Sanitizing %s with %s", saltpath, findPemFiles)
        runWithOutput(findPemFiles, logger=logger)
예제 #19
0
def buildNuPICCore(env, nupicCoreSHA, logger):
    """
    Builds nupic.core

    :param env: The environment which will be set before building.
    :param nupicCoreSHA: The SHA which will be built.

    :raises
      infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
    print "\n----------Building nupic.core------------"
    log.printEnv(env, logger)
    with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
        try:
            logger.debug("Building nupic.core SHA : %s ", nupicCoreSHA)
            git.resetHard(nupicCoreSHA)
            runWithOutput("mkdir -p build/scripts", env, logger)
            with changeToWorkingDir("build/scripts"):
                runWithOutput(
                    "cmake ../../src -DCMAKE_INSTALL_PREFIX=../release", env,
                    logger)
                runWithOutput("make -j 4", env, logger)
                runWithOutput("make install", env, logger)
        except CommandFailedError:
            raise NupicBuildFailed("nupic.core building failed.Exiting")
        except:
            raise PipelineError(
                "nupic.core building failed due to unknown reason.")
        else:
            logger.info("nupic.core building was successful.")
예제 #20
0
def buildNuPICCore(env, nupicCoreSHA, logger):
  """
    Builds nupic.core

    :param env: The environment which will be set before building.
    :param nupicCoreSHA: The SHA which will be built.

    :raises
      infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
  print "\n----------Building nupic.core------------"
  log.printEnv(env, logger)
  with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
    try:
      logger.debug("Building nupic.core SHA : %s ", nupicCoreSHA)
      git.resetHard(nupicCoreSHA)
      runWithOutput("mkdir -p build/scripts", env, logger)
      with changeToWorkingDir("build/scripts"):
        runWithOutput("cmake ../../src -DCMAKE_INSTALL_PREFIX=../release", env,
                      logger)
        runWithOutput("make -j 4", env, logger)
        runWithOutput("make install", env, logger)
    except CommandFailedError:
      raise NupicBuildFailed("nupic.core building failed.Exiting")
    except:
      raise PipelineError("nupic.core building failed due to unknown reason.")
    else:
      logger.info("nupic.core building was successful.")
예제 #21
0
def buildNuPIC(env, logger):
    """
    Builds NuPIC

    :param env: The environment which will be set before building

    :raises
      infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
    print "\n----------Building NuPIC------------"
    log.printEnv(env, logger)

    # Build
    with changeToWorkingDir(env["NUPIC"]):
        try:
            try:
                shutil.rmtree("build")
            except OSError:
                # didn't exist, so just pass
                pass

            # install requirements
            runWithOutput(
                "pip install --install-option=--prefix=%s --requirement "
                "external/common/requirements.txt" % env["NTA"],
                env=env,
                logger=logger)
            # need to remove this folder for wheel build to work
            shutil.rmtree("external/linux32arm")

            # build the wheel
            command = (
                "python setup.py bdist_wheel bdist_egg --nupic-core-dir=%s" %
                os.path.join(env["NUPIC_CORE_DIR"], "build", "release"))
            # Building on jenkins, not local
            if "JENKINS_HOME" in env:
                command += " upload -r numenta-pypi"

            runWithOutput(command, env=env, logger=logger)
        except:
            logger.exception("Failed while building nupic")
            raise NupicBuildFailed("NuPIC building failed.")
        else:
            open("nupic.stamp", "a").close()
            logger.debug("NuPIC building was successful.")
예제 #22
0
def buildNuPICCore(env, nupicCoreSha, logger):
    """
    Builds nupic.core

    :param env: The environment which will be set before building.
    :param nupicCoreSha: The SHA which will be built.

    :raises
      infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
    print "\n----------Building nupic.core------------"
    diagnostics.printEnv(env=env, logger=logger)
    with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
        try:
            logger.debug("Building nupic.core SHA : %s ", nupicCoreSha)
            git.resetHard(nupicCoreSha)
            runWithOutput("mkdir -p build/scripts", env=env, logger=logger)
            with changeToWorkingDir("build/scripts"):
                libdir = sysconfig.get_config_var("LIBDIR")
                runWithOutput(
                    ("cmake ../../src -DCMAKE_INSTALL_PREFIX=../release " "-DPYTHON_LIBRARY={}/libpython2.7.so").format(
                        libdir
                    ),
                    env=env,
                    logger=logger,
                )
                runWithOutput("make -j 4", env=env, logger=logger)
                runWithOutput("make install", env=env, logger=logger)

            # need to remove this folder to allow the caching process to work
            shutil.rmtree("external/linux32arm")

            # build the distributions
            command = "python setup.py install --force"
            # Building on jenkins, not local
            if "JENKINS_HOME" in os.environ:
                command += " bdist_wheel bdist_egg upload -r numenta-pypi"
            runWithOutput(command=command, env=env, logger=logger)
        except CommandFailedError:
            raise NupicBuildFailed("nupic.core building failed.Exiting")
        except:
            raise PipelineError("nupic.core building failed due to unknown reason.")
        else:
            logger.info("nupic.core building was successful.")
예제 #23
0
def buildCapnp(env, logger):
    """Builds capnp

    Downloads and unpacks the Cap'n Proto 0.5.2 source tarball into a
    capnp_tmp directory inside env["NUPIC_CORE_DIR"], then configures and
    installs a static build there.

    :param dict env: The environment which will be set before building.
    :param logger: An initialized logger

    :returns: Prefix path for capnp.

    :raises: re-raises the underlying exception if any build step fails,
      after logging it.
    """
    with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
        try:
            mkdirp("capnp_tmp")
            with changeToWorkingDir("capnp_tmp"):
                runWithOutput([
                    "curl", "-O",
                    "https://capnproto.org/capnproto-c++-0.5.2.tar.gz"
                ],
                              env=env,
                              logger=logger)
                runWithOutput(["tar", "zxf", "capnproto-c++-0.5.2.tar.gz"],
                              env=env,
                              logger=logger)
                capnpTmp = os.getcwd()
                with changeToWorkingDir("capnproto-c++-0.5.2"):
                    capnpEnv = env.copy()
                    # -fPIC + hidden visibility so the static library can be
                    # linked into shared objects later
                    capnpEnv["CXXFLAGS"] = (
                        "-fPIC -std=c++11 -m64 -fvisibility=hidden -Wall -Wreturn-type "
                        "-Wunused -Wno-unused-parameter")
                    runWithOutput([
                        "./configure", "--disable-shared",
                        "--prefix={}".format(capnpTmp)
                    ],
                                  env=capnpEnv,
                                  logger=logger)
                    runWithOutput("make -j4", env=env, logger=logger)
                    runWithOutput("make install", env=env, logger=logger)
                # Log success before returning: the original put this in a
                # try/else clause, which the `return` inside `try` made
                # unreachable, so success was never logged.
                logger.info("capnp building was successful.")
                return capnpTmp
        except Exception:
            logger.exception("capnp building failed due to unknown reason.")
            raise
예제 #24
0
def buildNuPICCore(env, nupicCoreSha, logger):
    """
    Builds nupic.core

    :param env: The environment which will be set before building.
    :param nupicCoreSha: The SHA which will be built.

    :raises
      infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
    print "\n----------Building nupic.core------------"
    diagnostics.printEnv(env=env, logger=logger)
    with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
        try:
            logger.debug("Building nupic.core SHA : %s ", nupicCoreSha)
            git.resetHard(nupicCoreSha)
            runWithOutput("mkdir -p build/scripts", env=env, logger=logger)
            with changeToWorkingDir("build/scripts"):
                libdir = sysconfig.get_config_var('LIBDIR')
                runWithOutput(
                    ("cmake ../../src -DCMAKE_INSTALL_PREFIX=../release "
                     "-DPYTHON_LIBRARY={}/libpython2.7.so").format(libdir),
                    env=env,
                    logger=logger)
                runWithOutput("make -j 4", env=env, logger=logger)
                runWithOutput("make install", env=env, logger=logger)

            # need to remove this folder to allow the caching process to work
            shutil.rmtree("external/linux32arm")

            # build the distributions
            command = "python setup.py install --force"
            # Building on jenkins, not local
            if "JENKINS_HOME" in os.environ:
                command += " bdist_wheel bdist_egg upload -r numenta-pypi"
            runWithOutput(command=command, env=env, logger=logger)
        except CommandFailedError:
            raise NupicBuildFailed("nupic.core building failed.Exiting")
        except:
            raise PipelineError(
                "nupic.core building failed due to unknown reason.")
        else:
            logger.info("nupic.core building was successful.")
예제 #25
0
  def installProductsIntoHTMITFakeroot(self):
    """
    Clone our git repo into the fakeroot directory tree.

    If we're configured to use a site-packages tarball; burst it.

    :returns: SHA of the products repo in the fakeroot
    """
    logger = self.logger
    fakeroot = self.fakeroot
    config = self.config

    # Ensure the parent directory for the products checkout exists
    productsParent = os.path.join(fakeroot, "opt", "numenta")
    logger.debug("Creating %s", productsParent)
    mkpath(productsParent)

    logger.debug("Cloning...")
    actualSHA = rpm.gitCloneIntoFakeroot(fakeroot=fakeroot,
                                         installDirectory="opt/numenta",
                                         repoDirectory="products",
                                         gitURL=config.gitURL,
                                         logger=logger,
                                         sha=config.sha)

    logger.debug("Creating site-packages if required")
    libPython = os.path.join(fakeroot, "opt", "numenta", "products",
                             "htm-it", "lib", "python2.7")
    mkpath(os.path.join(libPython, "site-packages"))

    # Burst site-packages tarball if set on command line
    if config.sitePackagesTarball:
      with changeToWorkingDir(libPython):
        logger.debug("Bursting %s in %s",
                     config.sitePackagesTarball,
                     libPython)
        runWithOutput("tar xf %s" % config.sitePackagesTarball)

    return actualSHA
예제 #26
0
def buildNuPIC(env, logger):
  """
    Builds NuPIC

    :param env: The environment which will be set before building

    :raises
      infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
  print "\n----------Building NuPIC------------"
  log.printEnv(env, logger)

  # Build
  with changeToWorkingDir(env["NUPIC"]):
    try:
      try:
        shutil.rmtree("build")
      except OSError:
        # didn't exist, so just pass
        pass

      # install requirements
      runWithOutput("pip install --install-option=--prefix=%s --requirement "
                    "external/common/requirements.txt" % env["NTA"],
                    env=env, logger=logger)
      # need to remove this folder for wheel build to work
      shutil.rmtree("external/linux32arm")

      # build the wheel
      command = ("python setup.py bdist_wheel bdist_egg --nupic-core-dir=%s" %
          os.path.join(env["NUPIC_CORE_DIR"], "build", "release"))
      # Building on jenkins, not local
      if "JENKINS_HOME" in env:
        command += " upload -r numenta-pypi"

      runWithOutput(command, env=env, logger=logger)
    except:
      logger.exception("Failed while building nupic")
      raise NupicBuildFailed("NuPIC building failed.")
    else:
      open("nupic.stamp", "a").close()
      logger.debug("NuPIC building was successful.")
예제 #27
0
    def cleanScripts(self):
        """
    Cleans the htm-it directory before packaging.

    Runs every configured cleanup script with --destroy-all-my-work from
    the script's own directory.
    """
        logger = self.logger
        environment = self.environment

        logger.info("Running cleanup scripts...")
        for scriptName in self.config.cleanupScripts:
            scriptPath = os.path.join(self.productsDirectory, scriptName)
            scriptDir = os.path.dirname(scriptPath)
            logger.debug("Changing to %s", scriptDir)
            logger.debug("Attempting to run %s", scriptPath)
            if not os.path.exists(scriptPath):
                raise InvalidParametersError("%s does not exist!" % scriptPath)
            with changeToWorkingDir(scriptDir):
                runWithOutput("%s %s" % (scriptPath, "--destroy-all-my-work"),
                              env=environment)
예제 #28
0
def cacheNuPICCore(env, buildWorkspace, nupicCoreSHA, uploadToS3, logger):
    """
    Caches nupic.core to /var/build/nupic.core/<SHA> and uploads to S3

    :param env: The environment dict
    :param buildWorkspace: The buildWorkspace where nupic.core is built
    :param nupicCoreSHA: A `string` representing the nupic.core SHA
    :param uploadToS3: `boolean` defining whether to upload to S3 or not
    :param logger: An initialized logger

    :raises: CommandFailedError if the tar process fails before upload.
  """
    cachedPath = "/var/build/nupic.core/%s" % nupicCoreSHA

    if not os.path.isdir(cachedPath):
        logger.info("Caching nupic.core to %s", cachedPath)

        with changeToWorkingDir(buildWorkspace):
            shutil.copytree(
                "nupic.core",
                ("/var/build/nupic.core/%s/nupic.core" % nupicCoreSHA))

            if uploadToS3:
                nupicCoreZip = "nupic.core-%s.zip" % nupicCoreSHA

                logger.info("Archiving nupic.core to %s", nupicCoreZip)
                command = "tar czf %s nupic.core" % nupicCoreZip

                nupicCoreZipPath = "%s/%s" % (buildWorkspace, nupicCoreZip)
                try:
                    runWithOutput(command, env, logger=logger)
                    logger.debug("Uploading %s to S3.", nupicCoreZip)
                    # g_config is the module-level pipeline configuration
                    s3.uploadToS3(g_config, nupicCoreZipPath,
                                  "builds_nupic_core", logger)
                except Exception:
                    # was a bare `except:`; narrowed so SystemExit and
                    # KeyboardInterrupt are not converted into a build error
                    logger.exception("Archiving nupic.core failed.")
                    raise CommandFailedError("Archiving nupic.core failed.")
                else:
                    logger.info("nupic.core cached locally and to S3.")

    else:
        logger.debug("Cached nupic.core already exists.")
예제 #29
0
def cacheNuPICCore(env, buildWorkspace, nupicCoreSHA, uploadToS3, logger):
  """
    Caches nupic.core to /var/build/nupic.core/<SHA> and uploads to S3

    :param env: The environment dict
    :param buildWorkspace: The buildWorkspace where nupic.core is built
    :param nupicCoreSHA: A `string` representing the nupic.core SHA
    :param uploadToS3: `boolean` defining whether to upload to S3 or not
    :param logger: An initialized logger

    :raises: CommandFailedError if the tar process fails before upload.
  """
  cachedPath = "/var/build/nupic.core/%s" % nupicCoreSHA

  if not os.path.isdir(cachedPath):
    logger.info("Caching nupic.core to %s", cachedPath)

    with changeToWorkingDir(buildWorkspace):
      shutil.copytree("nupic.core", ("/var/build/nupic.core/%s/nupic.core" %
                                     nupicCoreSHA))

      if uploadToS3:
        nupicCoreZip = "nupic.core-%s.zip" % nupicCoreSHA

        logger.info("Archiving nupic.core to %s", nupicCoreZip)
        command = "tar czf %s nupic.core" % nupicCoreZip

        nupicCoreZipPath = "%s/%s" % (buildWorkspace, nupicCoreZip)
        try:
          runWithOutput(command, env, logger=logger)
          logger.debug("Uploading %s to S3.", nupicCoreZip)
          # g_config is the module-level pipeline configuration
          s3.uploadToS3(g_config, nupicCoreZipPath,
                        "builds_nupic_core", logger)
        except Exception:
          # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
          # are not converted into a build error
          logger.exception("Archiving nupic.core failed.")
          raise CommandFailedError("Archiving nupic.core failed.")
        else:
          logger.info("nupic.core cached locally and to S3.")

  else:
    logger.debug("Cached nupic.core already exists.")
예제 #30
0
def runTestCommand(testCommand, env, outputFile=None):
  """
    Runs given test command with provided environment

    :param testCommand: Test command that is supposed to be run
    :param env: Current environ set for GROK_HOME, etc
    :param outputFile: Optional, Path for output file where stdout should be
      redirected. It is passed only if the tests are non-xunit tests; as the
      results are not generated as xml we need to redirect them to a text
      file.
    :returns: True if tests are successful, False otherwise
    :rtype: bool
  """
  try:
    if outputFile:
      # Context manager closes the output file even when check_call raises
      # (the original leaked the file handle)
      with open(outputFile, "w") as out:
        check_call(testCommand, shell=True, env=env, stdout=out)
      # Updating console
      runWithOutput("cat %s" % outputFile, env=env, logger=g_logger)
    else:
      runWithOutput(testCommand, env=env, logger=g_logger)
    return True
  except CommandFailedError:
    if outputFile:
      runWithOutput("cat %s" % outputFile)
    g_logger.error("Error executing %s\n*Most likely cause is a test FAILURE*",
                   testCommand)
    return False
예제 #31
0
def main(jsonArgs=None):
    """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

    :returns: exit status - 0 if all tests passed, 1 otherwise.

    :raises: re-raises any exception from argument parsing or the test run
      after logging it.
  """
    jsonArgs = jsonArgs or {}
    testResult = False
    try:
        (pipeline, buildWorkspace, grokSha, pipelineParams,
         pipelineJson) = addAndParseArgs(jsonArgs)

        os.environ["BUILD_WORKSPACE"] = buildWorkspace
        env = prepareEnv(buildWorkspace, None, os.environ)

        # Tests are failing without LD_LIBRARY_PATH, HACK
        env.update(
            LD_LIBRARY_PATH="/opt/numenta/anaconda/lib:/usr/lib64:/usr/lib")

        testResult = runUnitTests(env, pipeline, grokSha, g_logger)
        # Write testResult to JSON file if JSON file driven run
        if pipelineJson:
            pipelineParams["test"] = {"testStatus": testResult}
            with open(pipelineJson, 'w') as fp:
                fp.write(json.dumps(pipelineParams, ensure_ascii=False))
            runWithOutput("cat %s" % pipelineJson)
        # In any case log success/failure to console and exit accordingly
        exitStatus = int(not testResult)
        if exitStatus:
            g_logger.error("Test Failure!!!")
        else:
            g_logger.debug("All tests passed")
        return exitStatus
    except Exception:
        # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are
        # not logged as unknown unit-test errors
        g_logger.exception("Unknown error occurred while running unit tests")
        raise
예제 #32
0
def runTestCommand(testCommand, env, logger, outputFile=None):
    """
    Runs given test command with provided environment

    :param testCommand: Test command that is supposed to be run
    :param env: Current environ set for GROK_HOME, etc
    :param logger: An initialized logger
    :param outputFile: Optional, Path for output file where stdout should be
      redirected. It is passed only if the tests are non-xunit tests; as the
      results are not generated as xml we need to redirect them to a text
      file.
    :returns: True if tests are successful, False otherwise
    :rtype: bool
  """
    try:
        if outputFile:
            # Context manager closes the output file even when check_call
            # raises (the original leaked the file handle)
            with open(outputFile, "w") as out:
                check_call(testCommand,
                           shell=True,
                           env=env,
                           stdout=out)
            # Updating console
            runWithOutput("cat %s" % outputFile)
        else:
            runWithOutput(testCommand, env=env, logger=logger)

        logger.info("\n\n###### COMPLETED %s tests ######\n\n" % testCommand)
        return True
    except CommandFailedError:
        if outputFile:
            runWithOutput("cat %s" % outputFile)
        logger.error(
            "Error executing %s\n*Most likely cause is a test FAILURE*\n",
            testCommand)
        return False
def runTestCommand(testCommand, env, outputFile=None):
  """
    Runs given test command with provided environment

    :param str testCommand: Test command that is supposed to be run
    :param dict env: Current environ set for HTM_IT_HOME, etc
    :param str outputFile: Optional, Path for output file where stdout should
      be redirected. It is passed only if the tests are non-xunit tests; as
      the results are not generated as xml we need to redirect them to a text
      file.
    :returns: True if tests are successful, False otherwise
    :rtype: bool
  """
  try:
    if outputFile:
      # Context manager closes the output file even when check_call raises
      # (the original leaked the file handle)
      with open(outputFile, "w") as out:
        check_call(testCommand, shell=True, env=env, stdout=out)
      # Updating console
      runWithOutput(command=("cat", outputFile), env=env, logger=g_logger)
    else:
      runWithOutput(command=testCommand, env=env, logger=g_logger)
    return True
  except (CalledProcessError, CommandFailedError):
    if outputFile:
      runWithOutput(command=("cat", outputFile), env=env, logger=g_logger)
    g_logger.error("Error executing %s\n*Most likely cause is a test FAILURE*",
                   testCommand)
    return False
예제 #34
0
def buildGrok(env, pipelineConfig, logger):
  """
    Builds Grok with given Grok SHA.

    Creates the Grok site-packages directory if needed, installs the NuPIC
    wheel into GROK_HOME, then runs Grok's baseline configuration.

    :param env: The environment which will be set before building

    :param pipelineConfig: dict of the pipeline config values, e.g.:
      {
        "buildWorkspace": "/path/to/build/in",
        "grokRemote": "[email protected]:Numenta/numenta-apps.git",
        "grokBranch": "master",
        "grokSha": "HEAD",
        "nupicRemote": "[email protected]:numenta/nupic.git",
        "nupicBranch": "master",
        "nupicSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file",
        "wheelFilePath": "/path/to/wheel/file"
      }

    :param logger: Logger object.

    :raises
      infrastructure.utilities.exceptions.BuildFailureException:
      This exception is raised if build fails.
  """
  try:
    sitePackagesDir = os.path.join(env["PRODUCTS"],
                                   "grok/lib/python2.7/site-packages")
    if not os.path.exists(sitePackagesDir):
      os.makedirs(sitePackagesDir)
    with changeToWorkingDir(env["PRODUCTS"]):
      installNuPICWheel(env, env["GROK_HOME"],
                        pipelineConfig["wheelFilePath"], logger)

    # Setup the baseline configuration
    with changeToWorkingDir(env["GROK_HOME"]):
      runWithOutput("python setup.py configure_grok", env=env, logger=logger)
  except Exception:
    # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are not
    # converted into a BuildFailureException
    logger.exception("Unknown failure")
    raise BuildFailureException("Grok building failed. Exiting.")
예제 #35
0
    def cleanScripts(self):
        """
    Cleans the grok directory before packaging.

    Runs every configured cleanup script with --destroy-all-my-work from
    the script's own directory.
    """
        logger = self.logger
        environment = self.environment

        logger.info("Running cleanup scripts...")
        for scriptName in self.config.cleanupScripts:
            scriptPath = os.path.join(self.productsDirectory, scriptName)
            scriptDir = os.path.dirname(scriptPath)
            logger.debug("Changing to %s", scriptDir)
            logger.debug("Attempting to run %s", scriptPath)
            if not os.path.exists(scriptPath):
                raise InvalidParametersError("%s does not exist!" %
                                             scriptPath)
            with changeToWorkingDir(scriptDir):
                runWithOutput("%s %s" % (scriptPath, "--destroy-all-my-work"),
                              env=environment)
예제 #36
0
def buildNuPIC(env, logger, buildWorkspace):
  """
    Builds NuPIC

    Removes any stale build directory, installs NuPIC requirements into
    env["NTA"], then installs the distributions under buildWorkspace.

    :param env: The environment which will be set before building
    :param logger: An initialized logger
    :param buildWorkspace: install --prefix for the setup.py step

    :raises: re-raises the underlying exception if any build step fails,
      after logging it.
  """
  # Build
  with changeToWorkingDir(env["NUPIC"]):
    try:
      try:
        shutil.rmtree("build")
      except OSError:
        # didn't exist, so just pass
        pass

      # install requirements
      command = ("pip", "install", "--install-option=--prefix=%s" % env["NTA"],
                 "--requirement", "external/common/requirements.txt")

      runWithOutput(command=command, env=env, logger=logger)
      # need to remove this folder for wheel build to work
      shutil.rmtree("external/linux32arm")

      # build the distributions
      command = "python setup.py install --prefix=%s" % buildWorkspace
      # Building on jenkins, not local
      if "JENKINS_HOME" in os.environ:
        command += " bdist_wheel bdist_egg upload -r numenta-pypi"

      runWithOutput(command=command, env=env, logger=logger)
    except Exception:
      # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are
      # not masked before the re-raise
      logger.exception("Failed while building nupic")
      raise
    else:
      # stamp file marks a successful build for later checks
      open("nupic.stamp", "a").close()
      logger.debug("NuPIC building was successful.")
예제 #37
0
def buildNuPIC(env, logger, buildWorkspace):
  """
    Builds NuPIC

    Removes any stale build directory, installs NuPIC requirements into
    env["NTA"], then installs the distributions under buildWorkspace.

    :param env: The environment which will be set before building
    :param logger: An initialized logger
    :param buildWorkspace: install --prefix for the setup.py step

    :raises: re-raises the underlying exception if any build step fails,
      after logging it.
  """
  # Build
  with changeToWorkingDir(env["NUPIC"]):
    try:
      try:
        shutil.rmtree("build")
      except OSError:
        # didn't exist, so just pass
        pass

      # install requirements
      command = ("pip", "install", "--install-option=--prefix=%s" % env["NTA"],
                 "--requirement", "external/common/requirements.txt")

      runWithOutput(command=command, env=env, logger=logger)
      # need to remove this folder for wheel build to work
      shutil.rmtree("external/linux32arm")

      # build the distributions
      command = "python setup.py install --prefix=%s" % buildWorkspace
      # Building on jenkins, not local
      if "JENKINS_HOME" in os.environ:
        command += " bdist_wheel bdist_egg upload -r numenta-pypi"

      runWithOutput(command=command, env=env, logger=logger)
    except Exception:
      # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt are
      # not masked before the re-raise
      logger.exception("Failed while building nupic")
      raise
    else:
      # stamp file marks a successful build for later checks
      open("nupic.stamp", "a").close()
      logger.debug("NuPIC building was successful.")
예제 #38
0
def buildCapnp(env, logger):
  """Builds capnp

    Downloads and unpacks the Cap'n Proto 0.5.2 source tarball into a
    capnp_tmp directory inside env["NUPIC_CORE_DIR"], then configures and
    installs a static build there.

    :param dict env: The environment which will be set before building.
    :param logger: An initialized logger

    :returns: Prefix path for capnp.

    :raises: re-raises the underlying exception if any build step fails,
      after logging it.
  """
  with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
    try:
      mkdirp("capnp_tmp")
      with changeToWorkingDir("capnp_tmp"):
        runWithOutput(
            ["curl", "-O", "https://capnproto.org/capnproto-c++-0.5.2.tar.gz"],
            env=env, logger=logger)
        runWithOutput(["tar", "zxf", "capnproto-c++-0.5.2.tar.gz"],
                      env=env, logger=logger)
        capnpTmp = os.getcwd()
        with changeToWorkingDir("capnproto-c++-0.5.2"):
          capnpEnv = env.copy()
          # -fPIC + hidden visibility so the static library can be linked
          # into shared objects later
          capnpEnv["CXXFLAGS"] = (
              "-fPIC -std=c++11 -m64 -fvisibility=hidden -Wall -Wreturn-type "
              "-Wunused -Wno-unused-parameter")
          runWithOutput(
              ["./configure", "--disable-shared",
               "--prefix={}".format(capnpTmp)],
              env=capnpEnv, logger=logger)
          runWithOutput("make -j4", env=env, logger=logger)
          runWithOutput("make install", env=env, logger=logger)
        # Log success before returning: the original put this in a try/else
        # clause, which the `return` inside `try` made unreachable, so
        # success was never logged.
        logger.info("capnp building was successful.")
        return capnpTmp
    except Exception:
      logger.exception("capnp building failed due to unknown reason.")
      raise
예제 #39
0
    def installProductsIntoGrokFakeroot(self):
        """
    Clone our git repo into the fakeroot directory tree.

    If we're configured to use a site-packages tarball; burst it.

    :returns: SHA of the products repo in the fakeroot
    """
        logger = self.logger
        fakeroot = self.fakeroot
        config = self.config

        # Ensure the parent directory for the products checkout exists
        productsParent = os.path.join(fakeroot, "opt", "numenta")
        logger.debug("Creating %s", productsParent)
        mkpath(productsParent)

        logger.debug("Cloning...")
        actualSHA = rpm.gitCloneIntoFakeroot(fakeroot=fakeroot,
                                             installDirectory="opt/numenta",
                                             repoDirectory="products",
                                             gitURL=config.gitURL,
                                             logger=logger,
                                             sha=config.sha)

        logger.debug("Creating site-packages if required")
        libPython = os.path.join(fakeroot, "opt", "numenta", "products",
                                 "grok", "lib", "python2.7")
        mkpath(os.path.join(libPython, "site-packages"))

        # Burst site-packages tarball if set on command line
        if config.sitePackagesTarball:
            with changeToWorkingDir(libPython):
                logger.debug("Bursting %s in %s", config.sitePackagesTarball,
                             libPython)
                runWithOutput("tar xf %s" % config.sitePackagesTarball)

        return actualSHA
예제 #40
0
def loadGitDescribeFromDirectory(gitDirectory):
    """
  Load & parse git describe data from gitDirectory

  :param gitDirectory: path to a git clone.

  :returns: dict with "version", "commitsSinceTag" and "sha" keys, or None
    if the describe command fails.
  """
    versionData = {}
    with changeToWorkingDir(gitDirectory):
        try:
            # describe output is <tag>-<commitsSinceTag>-<sha>
            parts = runWithOutput(
                "git describe --long --tags --abbrev=40").strip().split("-")
            versionData["version"] = parts[0]
            versionData["commitsSinceTag"] = parts[1]
            versionData["sha"] = parts[2]
        except RuntimeError:
            versionData = None
    return versionData
예제 #41
0
def loadGitDescribeFromDirectory(YOMPDirectory):
  """
  Load & parse YOMP describe data from YOMPDirectory

  :param YOMPDirectory: path to a YOMP clone.

  :returns: dict with "version", "commitsSinceTag" and "sha" keys, or None
    if the describe command fails.
  """
  versionData = {}
  with changeToWorkingDir(YOMPDirectory):
    try:
      # describe output is <tag>-<commitsSinceTag>-<sha>
      parts = runWithOutput(
          "YOMP describe --long --tags --abbrev=40").strip().split("-")
      versionData["version"] = parts[0]
      versionData["commitsSinceTag"] = parts[1]
      versionData["sha"] = parts[2]
    except RuntimeError:
      versionData = None
  return versionData
def loadGitDescribeFromDirectory(gitDirectory):
  """
  Load & parse git describe data from gitDirectory

  :param gitDirectory: path to a git clone.

  :returns: dict with "version", "commitsSinceTag" and "sha" keys, or None
    if the describe command fails.
  """

  versionData = {}
  with changeToWorkingDir(gitDirectory):
    try:
      # "--long" (not the invalid "--log" the original passed) forces the
      # <tag>-<commitsSinceTag>-<sha> output format this parser expects;
      # the sibling implementations in this file also use --long
      rawVersion = runWithOutput(command=("git",
                                          "describe",
                                          "--long",
                                          "--tags",
                                          "--abbrev=40"),
                                 logger=g_logger).strip().split("-")
      versionData["version"] = rawVersion[0]
      versionData["commitsSinceTag"] = rawVersion[1]
      versionData["sha"] = rawVersion[2]
    except RuntimeError:
      versionData = None
  return versionData
예제 #43
0
def main(jsonArgs=None):
  """
    Creates an AMI using a HTM-IT RPM for a given SHA.

    1) Downloads the HTM-IT RPM corresponding to a given SHA to local disk
    2) Calls bake_ami.sh with the name of the HTM-IT RPM and the AMI name.
     to launch an instance with Packer, install the
       HTM-IT RPM from 1 products, runs integration
       tests, if green then stamps AMI

    :param jsonArgs: optional dict of arguments; passed through to
      addAndParseArgs. Defaults to an empty dict so the script works both
      standalone and via driver scripts.

    :raises MissingAWSKeysInEnvironment: if AWS credentials are not set in
      the environment.
    :raises TestsFailed: if the HTM-IT integration tests fail.
    :raises PipelineError: if an external process fails while baking the AMI.
  """
  try:
    jsonArgs = jsonArgs or {}
    parsedArgs = addAndParseArgs(jsonArgs)

    amiName = parsedArgs["amiName"]

    # AWS credentials must come from the environment; they are copied into
    # the global config for the S3 upload below
    if not (os.environ.get("AWS_ACCESS_KEY_ID") and
            os.environ.get("AWS_SECRET_ACCESS_KEY")):
      g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
      raise MissingAWSKeysInEnvironment("AWS keys are not set")
    else:
      g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
      g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

    artifactsDir = createOrReplaceArtifactsDir(logger=g_logger)

    g_logger.info("Creating the Ami")
    pipeLineSrc = os.path.join(os.environ["PRODUCTS"], "htm-it", "htm-it",
                               "pipeline", "src")
    with changeToWorkingDir(pipeLineSrc):
      g_logger.info("\n\n########## Baking AMI ##########")
      g_logger.debug("########## AMI Name: %s ##########", amiName)

      # Baking AMI takes around 15 mins, so print as it runs so we see
      # progress in the jenkins console during the run
      runWithOutput("./bake_ami %s" % amiName, env=os.environ, logger=g_logger)

      # bake_ami writes the generated AMI id into ami.txt in this directory
      amiIDPath = os.path.join(os.getcwd(), "ami.txt")

    # ami.txt's first line is "<label>:<ami-id>"; keep only the id part
    with open(amiIDPath, "r") as amiFileHandler:
      readAmiId = (amiFileHandler.readline()).split(":")
      amiID = readAmiId[1].strip()
      g_logger.info("AMI ID generated is: %s", amiID)

    # Archive the AMI id alongside this build's other artifacts
    buildNumber = getBuildNumber(logger=g_logger)
    artifactAmiIdPath = os.path.join(artifactsDir, "ami_%s.txt" % buildNumber)
    shutil.copy(amiIDPath, artifactAmiIdPath)
    print "#############################################################"
    print "Running the Integration Tests"
    runIntegrationTestScriptPath = os.path.join(os.environ["PRODUCTS"], "htm-it",
                                    "htm-it", "pipeline", "src")
    runIntegrationTestCommand = ("python " +
                                 "%s/run_htm-it_integration_tests.py"
                                 % runIntegrationTestScriptPath +
                                 " --ami " + amiID)
    if parsedArgs["pipelineJson"]:
      runIntegrationTestCommand += (" --pipeline-json %s"
                                    % parsedArgs["pipelineJson"])

    g_logger.info(runIntegrationTestCommand)
    runWithOutput(runIntegrationTestCommand, env=os.environ, logger=g_logger)

    # Load the json file again and check the status of test
    # NOTE(review): this read assumes parsedArgs["pipelineJson"] is set;
    # the earlier `if` suggests it can be falsy — confirm against callers
    with open(parsedArgs["pipelineJson"]) as jsonFile:
      params = json.load(jsonFile)
      integrationTestStatus = params.get("integration_test").get("testStatus")
    # Upload the ami-id to S3 if the pipeline was triggered with production
    # forks.
    if integrationTestStatus:
      g_logger.info("Uploading %s to S3 which contains the generated AMI: %s",
                    os.path.basename(artifactAmiIdPath), amiID)
      uploadToS3(config=g_config,
                 filePath=artifactAmiIdPath,
                 s3Folder="stable_ami",
                 logger=g_logger)

  except TestsFailed:
    g_logger.error("There was a failure executing the HTM-IT integration tests")
    raise
  except PipelineError:
    g_logger.exception("External process failed while baking the AMI")
    raise
  except Exception:
    g_logger.exception("Unknown error occurred while baking the AMI")
    raise
예제 #44
0
    def constructGrokFakeroot(self):
        """
    Construct a Grok fakeroot directory tree.

    1. Add any directories specified with --extend-pythonpath to the PYTHONPATH
       we will be using for setup.py, build scripts and the cleanup scripts.

    2. Install any wheels that have been specified by --use-wheel

    3. Run setup.py in any directories that have been specified with
       --setup-py-dir. Uses the arguments specified by --setup-py-arguments.

    4. Run any build scripts specified by --build-script

    5. Run any cleanup scripts specified by --cleanup-script

    6. Purge any files or directories at the top level of the checkout that were
       not whitelisted with --whitelist.

    :returns: (iteration, actualSHA) where iteration is the total commit count
    in the repository and fakerootSHA is the SHA in the fakeroot. If we're
    packaging a branch or tip of master, we're still going to want to know what
    the SHA was so we can include it in the RPM description.

    :rtype: tuple
    """

        config = self.config
        fakeroot = self.fakeroot
        logger = self.logger

        logger.info("Preparing Grok fakeroot in %s\n", fakeroot)

        actualSHA = self.installProductsIntoGrokFakeroot()

        productsDirectory = self.productsDirectory
        grokPath = os.path.join(productsDirectory, "grok")
        # Total commit count in the checkout (see docstring return contract)
        iteration = git.getCommitCount(productsDirectory)

        # Extend PYTHONPATH for setup.py, build & cleanup scripts
        # pythonpathExtensions
        logger.debug("**************************************************")
        logger.info("Phase 1: Preparing PYTHONPATH and installing wheels")
        # Set extra python path
        self.setPythonPath()
        environment = self.environment
        sitePackagesDirectory = "%s/grok/lib/python2.7/site-packages" % \
                                productsDirectory

        # Install wheels if any have been specified; each wheel is installed
        # into the fakeroot's site-packages, not the running interpreter's
        with changeToWorkingDir(grokPath):
            for wheel in config.wheels:
                logger.info("Installing %s", os.path.basename(wheel))
                if not os.path.exists(wheel):
                    raise InvalidParametersError("%s does not exist!" % wheel)
                pipCommand = "pip install %s --no-deps --target=%s" % \
                  (wheel, sitePackagesDirectory)
                logger.debug("pip command: %s", pipCommand)
                runWithOutput(pipCommand)
                logger.debug("wheel install complete")

        # Run setup.py if specified
        logger.info("Phase 2: Running setup.py commands")

        for pyDir in config.setupPyDirs:
            pyDirPath = "%s/%s" % (productsDirectory, pyDir)
            logger.debug("Changing to %s", pyDirPath)
            with changeToWorkingDir(pyDirPath):
                setupCommand = "python setup.py develop --prefix=%s/grok" % \
                               productsDirectory
                logger.debug("Running %s", setupCommand)
                runWithOutput(setupCommand, env=environment)

        # Run any build scripts. We assume that they should be run in the
        # directory they're in.
        logger.info("Phase 3: Running build scripts...")
        for builder in config.buildScripts:
            builderPath = "%s/%s" % (fakeroot, builder)
            logger.debug("Attempting to run %s", builderPath)
            if not os.path.exists(builderPath):
                raise InvalidParametersError("%s does not exist!" %
                                             builderPath)
            workDirectory = os.path.dirname(builderPath)
            logger.debug("Changing to %s", workDirectory)
            with changeToWorkingDir(workDirectory):
                runWithOutput(builderPath, env=environment)

        # Run any cleanup scripts. We assume that they should be run in the
        # directory they're in.
        logger.info("Phase 4: Running cleanup scripts...")
        # Clean Scripts
        self.cleanScripts()

        logger.info("Phase 5: Purge anything not whitelisted.")
        # Purge anything not whitelisted
        self.purgeBlacklistedStuff()

        return (iteration, actualSHA)
예제 #45
0
def buildNuPICCore(env, nupicCoreSha, logger, buildWorkspace, nupicVersion):
    """
    Builds nupic.core at a specific SHA inside env["NUPIC_CORE_DIR"].

    :param dict env: The environment which will be set before building.
    :param str nupicCoreSha: The SHA which will be built.
    :param logger: An initialized logger
    :param str buildWorkspace: /path/to/buildWorkspace; pip prerequisites and
      the built bindings are installed under this prefix for isolation.
    :param str nupicVersion: which version of NuPIC we're building (e.g. 0.3.4)

    :raises infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
    """
    with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
        try:
            logger.debug("Building nupic.core SHA : %s ", nupicCoreSha)
            # Pin the working tree to the exact SHA under build
            git.resetHard(sha=nupicCoreSha, logger=logger)

            # Build Cap'n Proto; the returned path is used below as the
            # install prefix (CMAKE_PREFIX_PATH, CPPFLAGS/LDFLAGS)
            capnpTmp = buildCapnp(env, logger)

            # The CMake source dir moved between releases: 0.3.5+ builds
            # from the repo root, older versions from ../../src
            if isVersionGreaterOrEqual(nupicVersion, "0.3.5"):
                srcDir = "../.."
            else:
                srcDir = "../../src"

            # install pre-reqs into  the build workspace for isolation
            runWithOutput(
                command=("pip install -r bindings/py/requirements.txt "
                         "--install-option=--prefix=%s "
                         "--ignore-installed" % buildWorkspace),
                env=env,
                logger=logger)

            # also install pycapnp
            command = ("pip", "install",
                       "--install-option=--prefix=%s" % buildWorkspace,
                       "pycapnp==0.5.5")
            runWithOutput(command=command, env=env, logger=logger)

            # Always start from a clean cmake build tree
            shutil.rmtree("build", ignore_errors=True)
            mkdirp("build/scripts")
            with changeToWorkingDir("build/scripts"):
                # Point cmake at the library/headers of the python
                # interpreter running this script
                libdir = sysconfig.get_config_var("LIBDIR")
                includeDir = sysconfig.get_config_var("INCLUDEPY")
                runWithOutput(
                    ("cmake {srcDir} -DCMAKE_INSTALL_PREFIX=../release "
                     "-DCMAKE_PREFIX_PATH={capnpPrefixPath} "
                     "-DPYTHON_LIBRARY={pythonLibDir}/libpython2.7.so "
                     "-DPYTHON_INCLUDE_DIR={pythonIncludeDir}").format(
                         srcDir=srcDir,
                         capnpPrefixPath=capnpTmp,
                         pythonLibDir=libdir,
                         pythonIncludeDir=includeDir),
                    env=env,
                    logger=logger)
                # -j 1 builds serially; NOTE(review): presumably to keep log
                # output ordered / bound memory on build slaves -- confirm
                runWithOutput("VERBOSE=1 make -j 1", env=env, logger=logger)
                runWithOutput("make install", env=env, logger=logger)

            # need to remove this folder to allow the caching process to work
            shutil.rmtree("external/linux32arm")

            # build the distributions
            # Compile/link the python bindings against the capnp built above
            nupicBindingsEnv = env.copy()
            nupicBindingsEnv["CPPFLAGS"] = "-I{}".format(
                os.path.join(capnpTmp, "include"))
            nupicBindingsEnv["LDFLAGS"] = "-L{}".format(
                os.path.join(capnpTmp, "lib"))
            command = (
                "python setup.py install --prefix={} --nupic-core-dir={}".
                format(buildWorkspace,
                       os.path.join(os.getcwd(), "build", "release")))
            # Building on jenkins, not local
            if "JENKINS_HOME" in os.environ:
                command += " bdist_wheel bdist_egg upload -r numenta-pypi"
            runWithOutput(command=command, env=nupicBindingsEnv, logger=logger)
        except:
            # Log with traceback, then re-raise so callers see the failure
            logger.exception("Failed to build nupic.core")
            raise
        else:
            logger.info("nupic.core building was successful.")
예제 #46
0
def main(jsonArgs=None):
    """
    Creates an AMI using a YOMP RPM for a given SHA.

    1) Downloads the YOMP RPM corresponding to a given SHA to local disk
    2) Calls bake_ami.sh with the name of the YOMP RPM and the AMI name.
     to launch an instance with Packer, install the
       YOMP RPM from 1 products, runs integration
       tests, if green then stamps AMI

  """
    try:
        jsonArgs = jsonArgs or {}
        parsedArgs = addAndParseArgs(jsonArgs)

        amiName = parsedArgs["amiName"]

        if not (os.environ.get("AWS_ACCESS_KEY_ID")
                and os.environ.get("AWS_SECRET_ACCESS_KEY")):
            g_logger.error(
                "Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
            raise MissingAWSKeysInEnvironment("AWS keys are not set")
        else:
            g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
            g_config["AWS_SECRET_ACCESS_KEY"] = os.environ[
                "AWS_SECRET_ACCESS_KEY"]

        artifactsDir = createOrReplaceArtifactsDir()

        g_logger.info("Creating the Ami")
        pipeLineSrc = os.path.join(os.environ["PRODUCTS"], "YOMP", "YOMP",
                                   "pipeline", "src")
        with changeToWorkingDir(pipeLineSrc):
            g_logger.info("\n\n########## Baking AMI ##########")
            g_logger.debug("########## AMI Name: %s ##########", amiName)

            # Baking AMI takes around 15 mins, so print as it runs so we see
            # progress in the jenkins console during the run
            runWithOutput("./bake_ami %s" % amiName,
                          env=os.environ,
                          logger=g_logger)

            amiIDPath = os.path.join(os.getcwd(), "ami.txt")

        with open(amiIDPath, "r") as amiFileHandler:
            readAmiId = (amiFileHandler.readline()).split(":")
            amiID = readAmiId[1].strip()
            g_logger.info("AMI ID generated is: %s", amiID)

        buildNumber = getBuildNumber()
        artifactAmiIdPath = os.path.join(artifactsDir,
                                         "ami_%s.txt" % buildNumber)
        shutil.copy(amiIDPath, artifactAmiIdPath)
        print "#############################################################"
        print "Running the Integration Tests"
        runIntegrationTestScriptPath = os.path.join(os.environ["PRODUCTS"],
                                                    "YOMP", "YOMP", "pipeline",
                                                    "src")
        runIntegrationTestCommand = (
            "python " +
            "%s/run_YOMP_integration_tests.py" % runIntegrationTestScriptPath +
            " --ami " + amiID)
        if parsedArgs["pipelineJson"]:
            runIntegrationTestCommand += (" --pipeline-json %s" %
                                          parsedArgs["pipelineJson"])

        g_logger.info(runIntegrationTestCommand)
        runWithOutput(runIntegrationTestCommand,
                      env=os.environ,
                      logger=g_logger)

        #Load the json file again and check the status of test
        with open(parsedArgs["pipelineJson"]) as jsonFile:
            params = json.load(jsonFile)
            integrationTestStatus = params.get("integration_test").get(
                "testStatus")
        # Upload the ami-id to S3 if the pipeline was triggred with production
        # forks.
        if integrationTestStatus:
            g_logger.info(
                "Uploading %s to S3 which contains the generated AMI: %s",
                os.path.basename(artifactAmiIdPath), amiID)
            uploadToS3(config=g_config,
                       filePath=artifactAmiIdPath,
                       s3Folder="stable_ami",
                       logger=g_logger)

    except TestsFailed:
        g_logger.error(
            "There was a failure executing the YOMP integration tests")
        raise
    except PipelineError:
        g_logger.exception("External process failed while baking the AMI")
        raise
    except Exception:
        g_logger.exception("Unknown error occurred while baking the AMI")
        raise
예제 #47
0
def bakeRPM(fakeroot,
            rpmName,
            baseVersion,
            artifacts=None,
            iteration="1",
            epoch="1",
            extraFPMarguments=None,
            logger=None,
            debug=False,
            description=None,
            architecture=None,
            postInstall=None):
    """
  Bake an RPM from a fakeroot.

  Assembles an fpm argument vector, then invokes fpm to do the actual
  RPM build.

  @param fakeroot - The fakeroot to bake

  @param rpmName - Name of RPM

  @param baseVersion - base section of RPM version (the 1.5 in 1.5-101). This
  is a string, not an int. For our RPMs, we should always use semver.org's
  semver format

  @param artifacts - objects in top level of fakeroot to include in the rpm

  @param iteration - iteration portion of the RPM version (the 101 in 1.5-101)

  @param epoch - epoch portion of the RPM. You should never change this

  @param extraFPMarguments - list of extra FPM arguments to include

  @param logger - logger object to use

  @param debug - debug status

  @param description - RPM description

  @param architecture - When present, have fpm force the output RPM's arch
  to this instead of defaulting to using autodetection

  @param postInstall - postinstall script to be included in the RPM
  """

    # Base argv shared by every RPM we bake
    fpmArgs = ["fpm", "-t", "rpm", "-s", "dir", "-n", rpmName]

    # Optionally pin the architecture instead of letting fpm autodetect
    if architecture:
        if logger:
            logger.debug("Forcing RPM architecture to %s" % architecture)
        fpmArgs.extend(["-a", architecture])

    # Epoch, iteration and version together form the full RPM version
    fpmArgs.extend(["--epoch", epoch])
    fpmArgs.extend(["--iteration", iteration])
    fpmArgs.extend(["--version", baseVersion])

    # Verbose fpm output when the caller asked for debug mode
    if debug:
        if logger:
            logger.debug("Enabling debug mode for fpm...")
        fpmArgs.append("--verbose")

    # Optional human-readable description for the RPM metadata
    if description:
        if logger:
            logger.debug("Setting RPM description: %s", description)
        fpmArgs.extend(["--description", "'" + description + "'"])

    # Optional postinstall hook baked into the RPM
    if postInstall:
        if logger:
            logger.debug("Adding --after-install to fpm arguments")
            logger.debug("after-install: %s", postInstall)
        fpmArgs.extend(["--after-install", postInstall])

    # Point fpm at the fakeroot tree
    fpmArgs.extend(["-C", fakeroot])
    if logger:
        logger.debug("fakeroot: %s", fakeroot)

    # Caller-supplied passthrough arguments, appended verbatim
    if extraFPMarguments:
        for extraArg in extraFPMarguments:
            if logger:
                logger.debug("Adding %s to fpm arguments", extraArg)
            fpmArgs.append(extraArg)

    # Top-level fakeroot entries that should land in the RPM
    if artifacts:
        for artifact in artifacts:
            fpmArgs.append(artifact)
            if logger:
                logger.debug("Including %s in RPM", artifact)

    renderedCommand = " ".join(fpmArgs)
    if logger:
        logger.debug("fpm command: %s", renderedCommand)
    runWithOutput(fpmArgs)
예제 #48
0
    # --- Fragment: mid-method body (wheel install + setup.py phases); the
    # enclosing def is not present in this example. ---
    # Set extra python path
    self.setPythonPath()
    environment = self.environment
    # Wheels below are pip-installed straight into this site-packages dir
    # via --target
    sitePackagesDirectory = "%s/htm-it/lib/python2.7/site-packages" % \
                            productsDirectory

    # Install wheels if any have been specified
    # NOTE(review): "htm-itPath" is not a legal Python identifier (the
    # hyphen parses as subtraction), so this fragment cannot compile as-is;
    # the original variable was presumably named htmItPath -- confirm.
    with changeToWorkingDir(htm-itPath):
      for wheel in config.wheels:
        logger.info("Installing %s", os.path.basename(wheel))
        if not os.path.exists(wheel):
          raise InvalidParametersError("%s does not exist!" % wheel)
        # --no-deps: the wheel's dependencies are managed elsewhere
        pipCommand = "pip install %s --no-deps --target=%s" % \
          (wheel, sitePackagesDirectory)
        logger.debug("pip command: %s", pipCommand)
        runWithOutput(pipCommand)
        logger.debug("wheel install complete")

    # Run setup.py if specified
    logger.info("Phase 2: Running setup.py commands")

    for pyDir in config.setupPyDirs:
      pyDirPath = "%s/%s" % (productsDirectory, pyDir)
      logger.debug("Changing to %s", pyDirPath)
      with changeToWorkingDir(pyDirPath):
        # develop-install each listed directory under the htm-it prefix
        setupCommand = "python setup.py develop --prefix=%s/htm-it" % \
                       productsDirectory
        logger.debug("Running %s", setupCommand)
        runWithOutput(setupCommand, env=environment)

    # Run any build scripts. We assume that they should be run in the
예제 #49
0
def packageDirectory(fakeroot,
                     packageName,
                     baseVersion,
                     afterInstall,
                     sha=None,
                     workDir=None,
                     depends=None,
                     arch=None,
                     description=None,
                     epoch=0,
                     release=0,
                     directoriesOwned=None
                     ):
  """
  Package a directory into an rpm. Generates rpms named in the following
  format:

    packageName-baseVersion-release.arch.rpm

  or

    packageName_sha-baseVersion-release.arch.rpm

  :param fakeroot: path to the directory to use as the root of the RPM's
    install tree
  :param packageName: This is the base name for the package. yum install
    packageName will pull the latest version
  :param baseVersion: The base version.
  :param afterInstall: path to a post-install script, passed to fpm's
    --after-install; may be falsy to skip.
  :param sha: if passed, we include it in the package name as packageName_sha
  :param workDir: directory path, passed to fpm.
  :param depends: comma separated list of packages required before this can
    be installed.
  :param arch: architecture. Either noarch or x86_64.
  :description: description to include in the rpm.
  :epoch: passed to fpm for use as the rpm epoch. Typically 0.
  :release: String. We either use commits since the release tag,
    or YYYY.MM.DD.HH.MM.SS.
  :directoriesOwned: comma separated list of directories that the rpm should
    tag as owned by the rpm. Directories in this list will be removed when
    the rpm is uninstalled if they are empty after the package's files are
    removed.
  :returns: the generated rpm file name.
  """

  command = ["fpm", "--verbose"]
  if not arch:
    if sys.platform.startswith("darwin"):
      # We're running on OS X, force noarch
      rpmType = "noarch"
      command.extend(["-a", "noarch"])
    else:
      rpmType = "x86_64"
  else:
    rpmType = arch
  if release > 0:
    rpmRelease = release
  else:
    rpmRelease = VERSION_TIMESTAMP
  if sha:
    rpmName = "%s_%s-%s-%s.%s.rpm" % (packageName,
                                      sha,
                                      baseVersion,
                                      rpmRelease,
                                      rpmType)
  else:
    rpmName = "%s-%s-%s.%s.rpm" % (packageName,
                                   baseVersion,
                                   rpmRelease,
                                   rpmType)
  # epoch/release default to ints, but every element of an argv list must
  # be a string -- stringify them before handing the list to runWithOutput
  command.extend(["--epoch", str(epoch), "-s", "dir", "-t", "rpm",
                  "--architecture", rpmType])

  if description:
    command.extend(["--description", description])
  command.extend(["--name", packageName])

  if depends:
    for dependency in depends.split(","):
      command.extend(["-d", dependency])

  command.extend(["--version", baseVersion, "--iteration", str(rpmRelease),
                  "--package", rpmName, "-C", fakeroot])

  if directoriesOwned is not None:
    command.extend(["--directories", directoriesOwned])

  if workDir:
    command.extend(["--workdir", workDir])

  if afterInstall:
    command.extend(["--after-install", afterInstall])

  # Find the top level files/dirs in the fakeroot and add them explicitly
  # to fpm's argument list
  fakerootFiles = os.listdir(fakeroot)
  command.extend(fakerootFiles)
  g_logger.debug("Running %s ... ", command)
  runWithOutput(command=command, logger=g_logger)
  return rpmName
예제 #50
0
def packageDirectory(fakeroot,
                     packageName,
                     baseVersion,
                     afterInstall,
                     sha=None,
                     workDir=None,
                     depends=None,
                     arch=None,
                     description=None,
                     epoch=0,
                     release=0,
                     directoriesOwned=None
                     ):
  """
  Package a directory into an rpm. Generates rpms named in the following
  format:

    packageName-baseVersion-release.arch.rpm

  or

    packageName_sha-baseVersion-release.arch.rpm

  :param fakeroot: path to the directory to use as the root of the RPM's
    install tree
  :param packageName: This is the base name for the package. yum install
    packageName will pull the latest version
  :param baseVersion: The base version.
  :param afterInstall: path to a post-install script, passed to fpm's
    --after-install; may be falsy to skip.
  :param sha: if passed, we include it in the package name as packageName_sha
  :param workDir: directory path, passed to fpm.
  :param depends: comma separated list of packages required before this can
    be installed.
  :param arch: architecture. Either noarch or x86_64.
  :description: description to include in the rpm.
  :epoch: passed to fpm for use as the rpm epoch. Typically 0.
  :release: String. We either use commits since the release tag,
    or YYYY.MM.DD.HH.MM.SS.
  :directoriesOwned: comma separated list of directories that the rpm should
    tag as owned by the rpm. Directories in this list will be removed when
    the rpm is uninstalled if they are empty after the package's files are
    removed.
  :returns: the generated rpm file name.
  """

  command = "fpm --verbose "
  if not arch:
    if sys.platform.startswith("darwin"):
      # We're running on OS X, force noarch
      rpmType = "noarch"
      command += "-a noarch "
    else:
      rpmType = "x86_64"
  else:
    rpmType = arch
  if release > 0:
    rpmRelease = release
  else:
    rpmRelease = VERSION_TIMESTAMP
  if sha:
    rpmName = "%s_%s-%s-%s.%s.rpm" % (packageName,
                                      sha,
                                      baseVersion,
                                      rpmRelease,
                                      rpmType)
  else:
    rpmName = "%s-%s-%s.%s.rpm" % (packageName,
                                   baseVersion,
                                   rpmRelease,
                                   rpmType)
  command += "--epoch %s -s dir -t rpm --architecture %s " % (epoch, rpmType)

  if description:
    command += "--description '%s' " % (description)
  command += "--name %s " % (packageName)

  if depends:
    for dependency in depends.split(","):
      command += "-d %s " % (dependency)

  command += "--version %s --iteration %s --package %s -C %s " % (baseVersion,
                                                                  rpmRelease,
                                                                  rpmName,
                                                                  fakeroot)

  # PEP 8: identity comparison against None, not equality
  if directoriesOwned is not None:
    command += "--directories %s " % directoriesOwned

  if workDir:
    command += "--workdir %s " % (workDir)

  if afterInstall:
    command += "--after-install %s " % (afterInstall)

  # Find the top level files/dirs in the fakeroot and add them explicitly
  # to fpm's argument list
  fakerootFiles = os.listdir(fakeroot)
  for rpmEntry in fakerootFiles:
    command += "%s " % (rpmEntry)
  g_logger.debug("Running %s ... ", command)
  runWithOutput(command)
  return rpmName
예제 #51
0
def buildNuPICCore(env, nupicCoreSha, logger, buildWorkspace, nupicVersion):
  """
    Builds nupic.core at a specific SHA inside env["NUPIC_CORE_DIR"].

    :param dict env: The environment which will be set before building.
    :param str nupicCoreSha: The SHA which will be built.
    :param logger: An initialized logger
    :param str buildWorkspace: /path/to/buildWorkspace; pip prerequisites
      and the built bindings are installed under this prefix for isolation.
    :param str nupicVersion: which version of NuPIC we're building (e.g. 0.3.4)

    :raises infrastructure.utilities.exceptions.NupicBuildFailed:
      This exception is raised if build fails.
  """
  with changeToWorkingDir(env["NUPIC_CORE_DIR"]):
    try:
      logger.debug("Building nupic.core SHA : %s ", nupicCoreSha)
      # Pin the working tree to the exact SHA under build
      git.resetHard(sha=nupicCoreSha, logger=logger)

      # The CMake source dir moved between releases: 0.3.5+ builds from the
      # repo root, older versions from ../../src
      if isVersionGreaterOrEqual(nupicVersion, "0.3.5"):
        srcDir = "../.."
      else:
        srcDir = "../../src"

      # Relative to build/scripts, where cmake is run below
      pyExtensionsDir = "../../bindings/py/nupic/bindings"

      # install pre-reqs into  the build workspace for isolation
      runWithOutput(command=("pip install -r bindings/py/requirements.txt "
                             "--install-option=--prefix=%s "
                             "--ignore-installed" % buildWorkspace),
                            env=env, logger=logger)

      # also install pycapnp
      command = ("pip", "install",
                 "--install-option=--prefix=%s" % buildWorkspace,
                 "pycapnp==0.5.5")
      runWithOutput(command=command, env=env, logger=logger)

      # Always start from a clean cmake build tree
      shutil.rmtree("build", ignore_errors=True)
      mkdirp("build/scripts")
      with changeToWorkingDir("build/scripts"):
        # Point cmake at the library/headers of the python interpreter
        # running this script
        libdir = sysconfig.get_config_var("LIBDIR")
        includeDir = sysconfig.get_config_var("INCLUDEPY")
        runWithOutput(("cmake {srcDir} -DCMAKE_INSTALL_PREFIX=../release "
                       "-DPYTHON_LIBRARY={pythonLibDir}/libpython2.7.so "
                       "-DPYTHON_INCLUDE_DIR={pythonIncludeDir} "
                       "-DPY_EXTENSIONS_DIR={pyExtensionsDir}").format(
                           srcDir=srcDir,
                           pythonLibDir=libdir,
                           pythonIncludeDir=includeDir,
                           pyExtensionsDir=pyExtensionsDir),
                      env=env, logger=logger)
        # VERBOSE=1 is passed via the environment here rather than on the
        # make command line; -j 1 keeps the build serial
        newEnv = dict(env)
        newEnv["VERBOSE"] = "1"
        runWithOutput("make -j 1", env=newEnv, logger=logger)
        runWithOutput("make install", env=env, logger=logger)

      # need to remove this folder to allow the caching process to work
      shutil.rmtree("external/linux32arm")

      # build the distributions
      command = "python setup.py install --prefix={}".format(buildWorkspace)
      # Building on jenkins, not local
      if "JENKINS_HOME" in os.environ:
        command += " bdist_wheel bdist_egg upload -r numenta-pypi"
      runWithOutput(command=command, env=env, logger=logger)
    except:
      # Log with traceback, then re-raise so callers see the failure
      logger.exception("Failed to build nupic.core")
      raise
    else:
      logger.info("nupic.core building was successful.")
예제 #52
0
  def constructHTMITFakeroot(self):
    """
    Construct a HTM-IT fakeroot directory tree.

    1. Add any directories specified with --extend-pythonpath to the PYTHONPATH
       we will be using for setup.py, build scripts and the cleanup scripts.

    2. Install any wheels that have been specied by --use-wheel

    3. Run setup.py in any directories that have been specified with
       --setup-py-dir. Uses the arguments specfied by --setup-py-arguments.

    4. Run any build scripts specified by --build-script

    5. Run any cleanup scripts specified by --cleanup-script

    6. Purge any files or directories at the top level of the checkout that were
       not whitelisted with --whitelist.

    :returns: (iteration, actualSHA) where iteration is the total commit count
    in the repository and fakerootSHA is the SHA in the fakeroot. If we're
    packaging a branch or tip of master, we're still going to want to know what
    the SHA was so we can include it in the RPM description.

    :rtype: tuple
    """

    config = self.config
    fakeroot = self.fakeroot
    logger = self.logger

    logger.info("Preparing HTM-IT fakeroot in %s\n", fakeroot)

    actualSHA = self.installProductsIntoHTMITFakeroot()

    productsDirectory = self.productsDirectory
    # Fixed: the original local was spelled "htm-itPath", which is not a
    # legal identifier (the hyphen parses as subtraction) and made this
    # method a SyntaxError.
    htmItPath = os.path.join(productsDirectory, "htm-it")
    iteration = git.getCommitCount(productsDirectory, logger=logger)

    # Extend PYTHONPATH for setup.py, build & cleanup scripts
    # pythonpathExtensions
    logger.debug("**************************************************")
    logger.info("Phase 1: Preparing PYTHONPATH and installing wheels")
    # Set extra python path
    self.setPythonPath()
    environment = self.environment
    sitePackagesDirectory = "%s/htm-it/lib/python2.7/site-packages" % \
                            productsDirectory

    # Install wheels if any have been specified
    with changeToWorkingDir(htmItPath):
      for wheel in config.wheels:
        logger.info("Installing %s", os.path.basename(wheel))
        if not os.path.exists(wheel):
          raise InvalidParametersError("%s does not exist!" % wheel)
        pipCommand = "pip install %s --no-deps --target=%s" % \
          (wheel, sitePackagesDirectory)
        logger.debug("pip command: %s", pipCommand)
        runWithOutput(pipCommand)
        logger.debug("wheel install complete")

    # Run setup.py if specified
    logger.info("Phase 2: Running setup.py commands")

    for pyDir in config.setupPyDirs:
      pyDirPath = "%s/%s" % (productsDirectory, pyDir)
      logger.debug("Changing to %s", pyDirPath)
      with changeToWorkingDir(pyDirPath):
        setupCommand = "python setup.py develop --prefix=%s/htm-it" % \
                       productsDirectory
        logger.debug("Running %s", setupCommand)
        runWithOutput(setupCommand, env=environment)

    # Run any build scripts. We assume that they should be run in the
    # directory they're in.
    logger.info("Phase 3: Running build scripts...")
    for builder in config.buildScripts:
      builderPath = "%s/%s" % (fakeroot, builder)
      logger.debug("Attempting to run %s", builderPath)
      if not os.path.exists(builderPath):
        raise InvalidParametersError("%s does not exist!" % builderPath)
      workDirectory = os.path.dirname(builderPath)
      logger.debug("Changing to %s", workDirectory)
      with changeToWorkingDir(workDirectory):
        runWithOutput(builderPath, env=environment)

    # Run any cleanup scripts. We assume that they should be run in the
    # directory they're in.
    logger.info("Phase 4: Running cleanup scripts...")
    # Clean Scripts
    self.cleanScripts()

    logger.info("Phase 5: Purge anything not whitelisted.")
    # Purge anything not whitelisted
    self.purgeBlacklistedStuff()

    return (iteration, actualSHA)
예제 #53
0
def bakeRPM(fakeroot,
            rpmName,
            baseVersion,
            artifacts=None,
            iteration="1",
            epoch="1",
            extraFPMarguments=None,
            logger=None,
            debug=False,
            description=None,
            architecture=None,
            postInstall=None):
  """
  Bake an RPM from a fakeroot.

  Generates an fpm command to create the RPM, then calls fpm to do the actual
  RPM build.

  @param fakeroot - The fakeroot to bake

  @param rpmName - Name of RPM

  @param baseVersion - base section of RPM version (the 1.5 in 1.5-101). This
  is a string, not an int. For our RPMs, we should always use semver.org's
  semver format

  @param artifacts - objects in top level of fakeroot to include in the rpm

  @param iteration - iteration portion of the RPM version (the 101 in 1.5-101)

  @param epoch - epoch portion of the RPM. You should never change this

  @param extraFPMarguments - list of extra FPM arguments to include

  @param logger - logger object to use

  @param debug - debug status

  @param description - RPM description

  @param architecture - When present, have fpm force the output RPM's arch
  to this instead of defaulting to using autodetection

  @param postInstall - postinstall script to be included in the RPM
  """

  # Construct the fpm command arguments array.

  # Start with the standard ones for all RPMs
  command = ["fpm",
             "-t", "rpm",
             "-s", "dir",
             "-n", rpmName,
            ]

  # Optionally pin the architecture instead of letting fpm autodetect
  if architecture:
    if logger:
      # NOTE(review): eager %-formatting; the other debug calls use lazy
      # logger args
      logger.debug("Forcing RPM architecture to %s" % architecture)
    command.append("-a")
    command.append(architecture)

  # Set RPM epoch
  command.append("--epoch")
  command.append(epoch)

  # Set RPM iteration
  command.append("--iteration")
  command.append(iteration)

  # Set RPM version
  command.append("--version")
  command.append(baseVersion)

  # Turn on verbose if we're in debug mode
  if debug:
    if logger:
      logger.debug("Enabling debug mode for fpm...")
    command.append("--verbose")

  # If we were given a description, add it to the RPM
  if description:
    if logger:
      logger.debug("Setting RPM description: %s", description)
    command.append("--description")
    # NOTE(review): the quotes become part of the argv element unless
    # runWithOutput goes through a shell -- confirm
    command.append("'" + description + "'")

  # Add a postinstall script if we were given one
  if postInstall:
    if logger:
      logger.debug("Adding --after-install to fpm arguments")
      logger.debug("after-install: %s", postInstall)
    command.append("--after-install")
    command.append(postInstall)

  # Tell fpm where the fakeroot is
  command.append("-C")
  command.append(fakeroot)
  if logger:
    logger.debug("fakeroot: %s", fakeroot)

  # Add any extra fpm arguments
  if extraFPMarguments:
    for fpmArg in extraFPMarguments:
      if logger:
        logger.debug("Adding %s to fpm arguments", fpmArg)
      command.append(fpmArg)

  # Add all the top level artifacts we want included in the RPM
  if artifacts:
    for artifact in artifacts:
      command.append(artifact)
      if logger:
        logger.debug("Including %s in RPM", artifact)

  # Render a single string purely for the debug log; the list itself is
  # what gets executed
  fpmCommand = " ".join(command)
  if logger:
    logger.debug("fpm command: %s", fpmCommand)
  runWithOutput(command)