Example #1
def createTextFileAndUpload(fileName, fileContents, fileDir, s3Folder, logger):
    """
    Creates file, add the Contents in the file and upload that file to S3.

    :param fileName: Name of the file to be created
    :param fileContents: Contents of the file
    :param fileDir: The path of the file to upload
    :param s3Folder: The S3 folder where the file is to be uploaded
    :param logger: A valid Numenta logger
  """

    with open(fileName, "w") as fHandle:
        fHandle.write(fileContents)

    filePath = os.path.join(fileDir, fileName)
    s3.uploadToS3(g_config, filePath, s3Folder, logger)
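
A minimal usage sketch, mirroring the calls made from cacheNuPIC below; it assumes the surrounding module already provides the s3 helper module, g_config, and a configured logger, and that nupicSha and wheelFileName were computed by the caller:

contents = nupicSha + ":" + wheelFileName
createTextFileAndUpload("nupic-package-version.txt", contents,
                        os.getcwd(), "stable_nupic_version", logger)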
Example #3
def cacheNuPICCore(env, buildWorkspace, nupicCoreSHA, uploadToS3, logger):
    """
    Caches nupic.core to /var/build/NuPIC.core/<SHA> and uploads to S3

    :param env: The environment dict
    :param buildWorkspace: The buildWorkspace were nupic.core is built
    :param nupicSha: A `string` representing SHA
    :param uploadToS3: `boolean` defining whether to upload to S3 or not

    :raises: CommandFailedError if the tar process fails before upload.
  """
    cachedPath = "/var/build/nupic.core/%s" % nupicCoreSHA

    if not os.path.isdir(cachedPath):
        logger.info("Caching nupic.core to %s", cachedPath)

        with changeToWorkingDir(buildWorkspace):
            shutil.copytree(
                "nupic.core",
                ("/var/build/nupic.core/%s/nupic.core" % nupicCoreSHA))

            if uploadToS3:
                nupicCoreZip = "nupic.core-%s.zip" % nupicCoreSHA

                logger.info("Archiving nupic.core to %s", nupicCoreZip)
                command = "tar czf %s nupic.core" % nupicCoreZip

                nupicCoreZipPath = "%s/%s" % (buildWorkspace, nupicCoreZip)
                try:
                    runWithOutput(command, env, logger=logger)
                    logger.debug("Uploading %s to S3.", nupicCoreZip)
                    s3.uploadToS3(g_config, nupicCoreZipPath,
                                  "builds_nupic_core", logger)
                except:
                    logger.exception("Archiving nupic.core failed.")
                    raise CommandFailedError("Archiving nupic.core failed.")
                else:
                    logger.info("nupic.core cached locally and to S3.")

    else:
        logger.debug("Cached nupic.core already exists.")
Example #4
def cacheNuPIC(env, nupicSha, uploadToS3, logger):
    """
    Caches a green build of NuPIC to /var/build/nupic/<SHA>

    :param env: The environment dict
    :param nupicSha: A `string` representing SHA.
  """
    cachedPath = "/var/build/nupic/%s" % nupicSha
    if not os.path.isdir(cachedPath):
        try:
            logger.info("Caching NuPIC to %s", cachedPath)
            shutil.copytree(env["NUPIC"], cachedPath)

            wheelDir = env["NUPIC"] + "/dist"
            wheelFile = glob.glob("%s/*.whl" % wheelDir)[0]
            logger.debug("Uploading %s to S3.", wheelFile)
            s3.uploadToS3(g_config, wheelFile, "builds_nupic_wheel", logger)

            wheelFileName = os.path.basename(wheelFile)
            fileDir = os.getcwd()
            s3Folder = "stable_nupic_version"
            contents = nupicSha + ":" + wheelFileName

            createTextFileAndUpload("nupic-package-version.txt", contents,
                                    fileDir, s3Folder, logger)
            createTextFileAndUpload(nupicSha, wheelFileName, fileDir, s3Folder,
                                    logger)
            shutil.move("nupic-package-version.txt", ARTIFACTS_DIR)
            with open("nupicSHA.txt", "w") as fHandle:
                fHandle.write(nupicSha)
            shutil.move("nupicSHA.txt", ARTIFACTS_DIR)

        except:
            logger.exception("Caching NuPIC failed.")
            raise
        else:
            logger.info("NuPIC cached locally and to S3.")

    else:
        logger.debug("Cached NuPIC already exists.")
Example #7
def main(jsonArgs=None):
  """
    Creates an AMI using an HTM-IT RPM for a given SHA.

    1) Downloads the HTM-IT RPM corresponding to the given SHA to local disk.
    2) Calls bake_ami.sh with the name of the HTM-IT RPM and the AMI name to
       launch an instance with Packer, install the HTM-IT RPM from step 1,
       run the integration tests, and stamp the AMI if the tests pass.

  """
  try:
    jsonArgs = jsonArgs or {}
    parsedArgs = addAndParseArgs(jsonArgs)

    amiName = parsedArgs["amiName"]

    if not (os.environ.get("AWS_ACCESS_KEY_ID") and
            os.environ.get("AWS_SECRET_ACCESS_KEY")):
      g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
      raise MissingAWSKeysInEnvironment("AWS keys are not set")
    else:
      g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
      g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

    artifactsDir = createOrReplaceArtifactsDir(logger=g_logger)

    g_logger.info("Creating the Ami")
    pipeLineSrc = os.path.join(os.environ["PRODUCTS"], "htm-it", "htm-it",
                               "pipeline", "src")
    with changeToWorkingDir(pipeLineSrc):
      g_logger.info("\n\n########## Baking AMI ##########")
      g_logger.debug("########## AMI Name: %s ##########", amiName)

      # Baking the AMI takes around 15 minutes, so stream output as it runs
      # to show progress in the Jenkins console
      runWithOutput("./bake_ami %s" % amiName, env=os.environ, logger=g_logger)

      amiIDPath = os.path.join(os.getcwd(), "ami.txt")

    with open(amiIDPath, "r") as amiFileHandler:
      readAmiId = (amiFileHandler.readline()).split(":")
      amiID = readAmiId[1].strip()
      g_logger.info("AMI ID generated is: %s", amiID)

    buildNumber = getBuildNumber(logger=g_logger)
    artifactAmiIdPath = os.path.join(artifactsDir, "ami_%s.txt" % buildNumber)
    shutil.copy(amiIDPath, artifactAmiIdPath)
    print "#############################################################"
    print "Running the Integration Tests"
    runIntegrationTestScriptPath = os.path.join(os.environ["PRODUCTS"], "htm-it",
                                    "htm-it", "pipeline", "src")
    runIntegrationTestCommand = ("python " +
                                 "%s/run_htm-it_integration_tests.py"
                                 % runIntegrationTestScriptPath +
                                 " --ami " + amiID)
    if parsedArgs["pipelineJson"]:
      runIntegrationTestCommand += (" --pipeline-json %s"
                                    % parsedArgs["pipelineJson"])

    g_logger.info(runIntegrationTestCommand)
    runWithOutput(runIntegrationTestCommand, env=os.environ, logger=g_logger)

    # Load the JSON file again and check the status of the integration test
    with open(parsedArgs["pipelineJson"]) as jsonFile:
      params = json.load(jsonFile)
      integrationTestStatus = params.get("integration_test").get("testStatus")
    # Upload the AMI ID to S3 if the pipeline was triggered with production
    # forks.
    if integrationTestStatus:
      g_logger.info("Uploading %s to S3 which contains the generated AMI: %s",
                    os.path.basename(artifactAmiIdPath), amiID)
      uploadToS3(config=g_config,
                 filePath=artifactAmiIdPath,
                 s3Folder="stable_ami",
                 logger=g_logger)

  except TestsFailed:
    g_logger.error("There was a failure executing the HTM-IT integration tests")
    raise
  except PipelineError:
    g_logger.exception("External process failed while baking the AMI")
    raise
  except Exception:
    g_logger.exception("Unknown error occurred while baking the AMI")
    raise
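
The only part of the pipeline JSON this function reads back is the integration-test status, so the file is expected to contain at least a structure like the following (illustrative; other keys written by earlier pipeline stages are omitted):

{
  "integration_test": {
    "testStatus": true
  }
}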
Example #8
def main(jsonArgs=None):
    """
    Creates an AMI using a YOMP RPM for a given SHA.

    1) Downloads the YOMP RPM corresponding to the given SHA to local disk.
    2) Calls bake_ami.sh with the name of the YOMP RPM and the AMI name to
       launch an instance with Packer, install the YOMP RPM from step 1,
       run the integration tests, and stamp the AMI if the tests pass.
    """
    try:
        jsonArgs = jsonArgs or {}
        parsedArgs = addAndParseArgs(jsonArgs)

        amiName = parsedArgs["amiName"]

        if not (os.environ.get("AWS_ACCESS_KEY_ID")
                and os.environ.get("AWS_SECRET_ACCESS_KEY")):
            g_logger.error(
                "Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
            raise MissingAWSKeysInEnvironment("AWS keys are not set")
        else:
            g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
            g_config["AWS_SECRET_ACCESS_KEY"] = os.environ[
                "AWS_SECRET_ACCESS_KEY"]

        artifactsDir = createOrReplaceArtifactsDir()

        g_logger.info("Creating the Ami")
        pipeLineSrc = os.path.join(os.environ["PRODUCTS"], "YOMP", "YOMP",
                                   "pipeline", "src")
        with changeToWorkingDir(pipeLineSrc):
            g_logger.info("\n\n########## Baking AMI ##########")
            g_logger.debug("########## AMI Name: %s ##########", amiName)

            # Baking the AMI takes around 15 minutes, so stream output as it
            # runs to show progress in the Jenkins console
            runWithOutput("./bake_ami %s" % amiName,
                          env=os.environ,
                          logger=g_logger)

            amiIDPath = os.path.join(os.getcwd(), "ami.txt")

        with open(amiIDPath, "r") as amiFileHandler:
            readAmiId = (amiFileHandler.readline()).split(":")
            amiID = readAmiId[1].strip()
            g_logger.info("AMI ID generated is: %s", amiID)

        buildNumber = getBuildNumber()
        artifactAmiIdPath = os.path.join(artifactsDir,
                                         "ami_%s.txt" % buildNumber)
        shutil.copy(amiIDPath, artifactAmiIdPath)
        print "#############################################################"
        print "Running the Integration Tests"
        runIntegrationTestScriptPath = os.path.join(os.environ["PRODUCTS"],
                                                    "YOMP", "YOMP", "pipeline",
                                                    "src")
        runIntegrationTestCommand = (
            "python " +
            "%s/run_YOMP_integration_tests.py" % runIntegrationTestScriptPath +
            " --ami " + amiID)
        if parsedArgs["pipelineJson"]:
            runIntegrationTestCommand += (" --pipeline-json %s" %
                                          parsedArgs["pipelineJson"])

        g_logger.info(runIntegrationTestCommand)
        runWithOutput(runIntegrationTestCommand,
                      env=os.environ,
                      logger=g_logger)

        # Load the JSON file again and check the status of the integration test
        with open(parsedArgs["pipelineJson"]) as jsonFile:
            params = json.load(jsonFile)
            integrationTestStatus = params.get("integration_test").get(
                "testStatus")
        # Upload the AMI ID to S3 if the pipeline was triggered with production
        # forks.
        if integrationTestStatus:
            g_logger.info(
                "Uploading %s to S3 which contains the generated AMI: %s",
                os.path.basename(artifactAmiIdPath), amiID)
            uploadToS3(config=g_config,
                       filePath=artifactAmiIdPath,
                       s3Folder="stable_ami",
                       logger=g_logger)

    except TestsFailed:
        g_logger.error(
            "There was a failure executing the YOMP integration tests")
        raise
    except PipelineError:
        g_logger.exception("External process failed while baking the AMI")
        raise
    except Exception:
        g_logger.exception("Unknown error occurred while baking the AMI")
        raise