Example #1
def main(jsonArgs=None):
    """
    This is the Main fuction, which creates YOMP rpms and
    writes the status to the json file if it is json driven.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :raises: raises generic Exception if anything else goes wrong.
  """
    jsonArgs = jsonArgs or {}
    (buildWorkspace, YOMPSha, releaseVersion, pipelineParams, pipelineJson,
     YOMPRemote) = addAndParseArgs(jsonArgs)
    try:
        # TODO: TAUR-841: Use an IAM role on the Jenkins instances instead of
        # embedding the AWS keypair in the repo. Boto will take care of either
        # loading the key from the environment or from the IAM role automagically.
        if not (os.environ.get("AWS_ACCESS_KEY_ID")
                and os.environ.get("AWS_SECRET_ACCESS_KEY")):
            g_logger.error(
                "Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
            raise exceptions.MissingAWSKeysInEnvironment(
                "AWS keys are not set")

        env = prepareEnv(buildWorkspace, None, os.environ)
        artifactsDir = jenkins.createOrReplaceArtifactsDir()
        syncRpm, rpmNameDetails = buildRpms(env, YOMPSha, releaseVersion,
                                            artifactsDir, g_logger, g_config,
                                            YOMPRemote)
        packageRpm = {
            "syncRpm": syncRpm,
            "YOMPRpmName": rpmNameDetails["YOMP"],
            "repoName": "x86_64"
        }
        pipelineParams["packageRpm"] = packageRpm
        g_logger.debug(pipelineParams)
        if pipelineJson:
            with open(pipelineJson, 'w') as fp:
                fp.write(json.dumps(pipelineParams, ensure_ascii=False))
    except Exception:
        g_logger.exception("Unknown error occurred in pack RPM phase")
        raise
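
A driver script would normally call this entry point with a small argument dict. The following is a minimal sketch of such an invocation; the pipeline JSON path and log level are illustrative placeholders, not values from the real pipeline:

# Minimal sketch of a driver-style invocation of main(); the path and log
# level below are hypothetical placeholders.
if __name__ == "__main__":
    main({"pipelineJson": "/tmp/pipeline.json",
          "logLevel": "debug"})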
Example #2
def main(jsonArgs=None):
  """
    This is the Main fuction, which creates HTM-IT rpms and
    writes the status to the json file if it is json driven.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :raises: raises generic Exception if anything else goes wrong.
  """
  jsonArgs = jsonArgs or {}
  (buildWorkspace, htmItSha, releaseVersion,
   pipelineParams, pipelineJson, htmItRemote) = addAndParseArgs(jsonArgs)
  try:
    # TODO: TAUR-841: Use an IAM role on the Jenkins instances instead of
    # embedding the AWS keypair in the repo. Boto will take care of either
    # loading the key from the environment or from the IAM role automagically.
    if not (os.environ.get("AWS_ACCESS_KEY_ID") and
            os.environ.get("AWS_SECRET_ACCESS_KEY")):
      g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
      raise exceptions.MissingAWSKeysInEnvironment("AWS keys are not set")

    env = prepareEnv(buildWorkspace, None, os.environ)
    artifactsDir = jenkins.createOrReplaceArtifactsDir(logger=g_logger)
    syncRpm, rpmNameDetails = buildRpms(env, htmItSha,
                                        releaseVersion, artifactsDir,
                                        g_logger, g_config, htmItRemote)
    packageRpm = {"syncRpm": syncRpm,
                  "htm-itRpmName": rpmNameDetails["htm-it"],
                  "repoName": "x86_64"}
    pipelineParams["packageRpm"] = packageRpm
    g_logger.debug(pipelineParams)
    if pipelineJson:
      with open(pipelineJson, 'w') as fp:
        fp.write(json.dumps(pipelineParams, ensure_ascii=False))
  except Exception:
    g_logger.exception("Unknown error occurred in pack RPM phase")
    raise
Example #3
def cacheNuPIC(env, nupicSha, logger):
  """
    Caches a green build of NuPIC to /var/build/nupic/<SHA>

    :param env: The environment dict
    :param nupicSha: A `string` representing SHA.
  """
  cachedPath = "/var/build/nupic/%s" % nupicSha
  if not os.path.isdir(cachedPath):
    try:
      logger.info("Caching NuPIC to %s", cachedPath)
      shutil.copytree(env["NUPIC"], cachedPath)

      wheelDir = env["NUPIC"] + "/dist"
      wheelFile = glob.glob("%s/*.whl" % wheelDir)[0]
      wheelFileName = os.path.basename(wheelFile)
      contents = nupicSha + ":" + wheelFileName

      createTextFile(fileName="nupic-package-version.txt",
                     fileContents=contents)
      createTextFile(fileName="nupicSHA.txt",
                     fileContents=nupicSha)

      artifactsDir = createOrReplaceArtifactsDir(logger=logger)

      shutil.move("nupic-package-version.txt", artifactsDir)
      with open("nupicSHA.txt", "w") as fHandle:
        fHandle.write(nupicSha)
      shutil.move("nupicSHA.txt", artifactsDir)

    except Exception:
      logger.exception("Caching NuPIC failed.")
      raise
    else:
      logger.info("NuPIC cached locally.")
  else:
    logger.debug("Cached NuPIC already exists.")
Example #5
import os

import yaml
from pkg_resources import resource_stream

from infrastructure.utilities import git
from infrastructure.utilities.jenkins import (createOrReplaceResultsDir,
                                              createOrReplaceArtifactsDir)
from infrastructure.utilities import logger as log
from infrastructure.utilities import s3
from infrastructure.utilities.env import addNupicCoreToEnv
from infrastructure.utilities.exceptions import (CommandFailedError,
                                                 NupicBuildFailed,
                                                 PipelineError)
from infrastructure.utilities.path import changeToWorkingDir
from infrastructure.utilities.cli import runWithOutput

NUPIC_CORE_REMOTE = "git@github.com:numenta/nupic.core.git"
SCRIPTS_DIR = os.path.join(git.getGitRootFolder(), "nupic-pipeline", "scripts")
ARTIFACTS_DIR = createOrReplaceArtifactsDir()

DOXYFILE = "docs/Doxyfile"
INIT_FILE = "nupic/__init__.py"
VERSION_FILE = "VERSION"

g_config = yaml.load(
    resource_stream(__name__, "../../../conf/nupic/config.yaml"))


def fetchNuPIC(env, buildWorkspace, nupicRemote, nupicBranch, nupicSha,
               logger):
    """
    This method clones NuPIC repo if it is not present
    and checks out to required nupicBranch
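
As an aside, the module-level g_config above is loaded once at import time from a YAML file packaged with the module. A minimal sketch of that resource_stream + yaml pattern, using a hypothetical package-relative path and key:

# Sketch of the packaged-YAML loading pattern used above; the relative path
# and the "S3_BUCKET" key are hypothetical examples.
# safe_load is used so the sketch runs on current PyYAML; the original code
# predates the Loader requirement of yaml.load.
import yaml
from pkg_resources import resource_stream

config = yaml.safe_load(resource_stream(__name__, "conf/example.yaml"))
bucket = config.get("S3_BUCKET")  # hypothetical key, for illustration only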
Example #6
def main(jsonArgs=None):
  """
    Creates an AMI using a HTM-IT RPM for a given SHA.

    1) Downloads the HTM-IT RPM corresponding to a given SHA to local disk
    2) Calls bake_ami.sh with the name of the HTM-IT RPM and the AMI name.
     to launch an instance with Packer, install the
       HTM-IT RPM from 1 products, runs integration
       tests, if green then stamps AMI

  """
  try:
    jsonArgs = jsonArgs or {}
    parsedArgs = addAndParseArgs(jsonArgs)

    amiName = parsedArgs["amiName"]

    if not (os.environ.get("AWS_ACCESS_KEY_ID") and
            os.environ.get("AWS_SECRET_ACCESS_KEY")):
      g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
      raise MissingAWSKeysInEnvironment("AWS keys are not set")
    else:
      g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
      g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

    artifactsDir = createOrReplaceArtifactsDir(logger=g_logger)

    g_logger.info("Creating the Ami")
    pipeLineSrc = os.path.join(os.environ["PRODUCTS"], "htm-it", "htm-it",
                               "pipeline", "src")
    with changeToWorkingDir(pipeLineSrc):
      g_logger.info("\n\n########## Baking AMI ##########")
      g_logger.debug("########## AMI Name: %s ##########", amiName)

      # Baking AMI takes around 15 mins, so print as it runs so we see
      # progress in the jenkins console during the run
      runWithOutput("./bake_ami %s" % amiName, env=os.environ, logger=g_logger)

      amiIDPath = os.path.join(os.getcwd(), "ami.txt")

    with open(amiIDPath, "r") as amiFileHandler:
      readAmiId = (amiFileHandler.readline()).split(":")
      amiID = readAmiId[1].strip()
      g_logger.info("AMI ID generated is: %s", amiID)

    buildNumber = getBuildNumber(logger=g_logger)
    artifactAmiIdPath = os.path.join(artifactsDir, "ami_%s.txt" % buildNumber)
    shutil.copy(amiIDPath, artifactAmiIdPath)
    print "#############################################################"
    print "Running the Integration Tests"
    runIntegrationTestScriptPath = os.path.join(os.environ["PRODUCTS"],
                                                "htm-it", "htm-it",
                                                "pipeline", "src")
    runIntegrationTestCommand = ("python " +
                                 "%s/run_htm-it_integration_tests.py"
                                 % runIntegrationTestScriptPath +
                                 " --ami " + amiID)
    if parsedArgs["pipelineJson"]:
      runIntegrationTestCommand += (" --pipeline-json %s"
                                    % parsedArgs["pipelineJson"])

    g_logger.info(runIntegrationTestCommand)
    runWithOutput(runIntegrationTestCommand, env=os.environ, logger=g_logger)

    # Load the JSON file again and check the status of the test
    with open(parsedArgs["pipelineJson"]) as jsonFile:
      params = json.load(jsonFile)
      integrationTestStatus = params.get("integration_test").get("testStatus")
    # Upload the ami-id to S3 if the pipeline was triggered with production
    # forks.
    if integrationTestStatus:
      g_logger.info("Uploading %s to S3 which contains the generated AMI: %s",
                    os.path.basename(artifactAmiIdPath), amiID)
      uploadToS3(config=g_config,
                 filePath=artifactAmiIdPath,
                 s3Folder="stable_ami",
                 logger=g_logger)

  except TestsFailed:
    g_logger.error("There was a failure executing the HTM-IT integration tests")
    raise
  except PipelineError:
    g_logger.exception("External process failed while baking the AMI")
    raise
  except Exception:
    g_logger.exception("Unknown error occurred while baking the AMI")
    raise
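
The ami.txt handling above appears to assume that the bake script writes a single "name:ami-id" line, which is then split on the colon. A minimal sketch of that read, with made-up file contents:

# Sketch of the ami.txt parsing done above; the AMI name and ID are made up.
# Assumed file content (one line):  my-ami-name:ami-0123456789abcdef0
with open("ami.txt", "r") as amiFileHandler:
    amiID = amiFileHandler.readline().split(":")[1].strip()
print(amiID)  # -> ami-0123456789abcdef0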
Example #7
def main(jsonArgs=None):
    """
    Creates an AMI using a YOMP RPM for a given SHA.

    1) Downloads the YOMP RPM corresponding to a given SHA to local disk
    2) Calls bake_ami.sh with the name of the YOMP RPM and the AMI name.
     to launch an instance with Packer, install the
       YOMP RPM from 1 products, runs integration
       tests, if green then stamps AMI

  """
    try:
        jsonArgs = jsonArgs or {}
        parsedArgs = addAndParseArgs(jsonArgs)

        amiName = parsedArgs["amiName"]

        if not (os.environ.get("AWS_ACCESS_KEY_ID")
                and os.environ.get("AWS_SECRET_ACCESS_KEY")):
            g_logger.error(
                "Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
            raise MissingAWSKeysInEnvironment("AWS keys are not set")
        else:
            g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
            g_config["AWS_SECRET_ACCESS_KEY"] = os.environ[
                "AWS_SECRET_ACCESS_KEY"]

        artifactsDir = createOrReplaceArtifactsDir()

        g_logger.info("Creating the Ami")
        pipeLineSrc = os.path.join(os.environ["PRODUCTS"], "YOMP", "YOMP",
                                   "pipeline", "src")
        with changeToWorkingDir(pipeLineSrc):
            g_logger.info("\n\n########## Baking AMI ##########")
            g_logger.debug("########## AMI Name: %s ##########", amiName)

            # Baking AMI takes around 15 mins, so print as it runs so we see
            # progress in the jenkins console during the run
            runWithOutput("./bake_ami %s" % amiName,
                          env=os.environ,
                          logger=g_logger)

            amiIDPath = os.path.join(os.getcwd(), "ami.txt")

        with open(amiIDPath, "r") as amiFileHandler:
            readAmiId = (amiFileHandler.readline()).split(":")
            amiID = readAmiId[1].strip()
            g_logger.info("AMI ID generated is: %s", amiID)

        buildNumber = getBuildNumber()
        artifactAmiIdPath = os.path.join(artifactsDir,
                                         "ami_%s.txt" % buildNumber)
        shutil.copy(amiIDPath, artifactAmiIdPath)
        print "#############################################################"
        print "Running the Integration Tests"
        runIntegrationTestScriptPath = os.path.join(os.environ["PRODUCTS"],
                                                    "YOMP", "YOMP", "pipeline",
                                                    "src")
        runIntegrationTestCommand = (
            "python " +
            "%s/run_YOMP_integration_tests.py" % runIntegrationTestScriptPath +
            " --ami " + amiID)
        if parsedArgs["pipelineJson"]:
            runIntegrationTestCommand += (" --pipeline-json %s" %
                                          parsedArgs["pipelineJson"])

        g_logger.info(runIntegrationTestCommand)
        runWithOutput(runIntegrationTestCommand,
                      env=os.environ,
                      logger=g_logger)

        # Load the JSON file again and check the status of the test
        with open(parsedArgs["pipelineJson"]) as jsonFile:
            params = json.load(jsonFile)
            integrationTestStatus = params.get("integration_test").get(
                "testStatus")
        # Upload the ami-id to S3 if the pipeline was triggered with production
        # forks.
        if integrationTestStatus:
            g_logger.info(
                "Uploading %s to S3 which contains the generated AMI: %s",
                os.path.basename(artifactAmiIdPath), amiID)
            uploadToS3(config=g_config,
                       filePath=artifactAmiIdPath,
                       s3Folder="stable_ami",
                       logger=g_logger)

    except TestsFailed:
        g_logger.error(
            "There was a failure executing the YOMP integration tests")
        raise
    except PipelineError:
        g_logger.exception("External process failed while baking the AMI")
        raise
    except Exception:
        g_logger.exception("Unknown error occurred while baking the AMI")
        raise
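
The test-status check in both AMI examples reads the pipeline JSON back after the integration-test run and looks for a nested "integration_test"/"testStatus" entry. A minimal sketch of the structure that lookup expects, with hypothetical values and a temporary path:

# Sketch of the pipeline JSON shape the status check above expects; the
# path and values here are hypothetical.
import json

params = {"integration_test": {"testStatus": True}}
with open("/tmp/pipeline.json", "w") as fp:
    fp.write(json.dumps(params, ensure_ascii=False))

with open("/tmp/pipeline.json") as jsonFile:
    status = json.load(jsonFile).get("integration_test").get("testStatus")
assert status is True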
Example #8
import os

import yaml
from pkg_resources import resource_stream

from infrastructure.utilities import git
from infrastructure.utilities.jenkins import createOrReplaceArtifactsDir
from infrastructure.utilities import logger as log
from infrastructure.utilities import s3
from infrastructure.utilities.env import addNupicCoreToEnv
from infrastructure.utilities.exceptions import (
  CommandFailedError,
  NupicBuildFailed,
  PipelineError
)
from infrastructure.utilities.path import changeToWorkingDir
from infrastructure.utilities.cli import runWithOutput



NUPIC_CORE_REMOTE = "git@github.com:numenta/nupic.core.git"
SCRIPTS_DIR = os.path.join(git.getGitRootFolder(), "nupic-pipeline", "scripts")
ARTIFACTS_DIR = createOrReplaceArtifactsDir()

g_config = yaml.load(
    resource_stream(__name__, "../../../conf/nupic/config.yaml"))



def fetchNuPIC(env, buildWorkspace, nupicRemote, nupicBranch, nupicSha, logger):
  """
    This method clones the NuPIC repo if it is not present
    and checks out the required nupicBranch

    :param env: The environment which will be used before building.
    :param buildWorkspace: The workspace where NuPIC should be built
    :param nupicRemote: URL for NuPIC remote repository
    :param nupicBranch: The NuPIC branch which will be used to build