Example #1
def main(jsonArgs=None):
  """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

  """
  jsonArgs = jsonArgs or {}
  testResult = False
  try:
    (buildWorkspace, pipelineParams, pipelineJson) = addAndParseArgs(jsonArgs)

    os.environ["BUILD_WORKSPACE"] = buildWorkspace
    env = prepareEnv(buildWorkspace, None, os.environ)

    testResult = runUnitTests(env=env)
    # Write testResult to JSON file if JSON file driven run
    if pipelineJson:
      pipelineParams["test"] = {"testStatus" : testResult}
      with open(pipelineJson, 'w') as fp:
        fp.write(json.dumps(pipelineParams, ensure_ascii=False))
      runWithOutput("cat %s" % pipelineJson)
    # In any case log success/failure to console and exit accordingly
    exitStatus = int(not testResult)
    if exitStatus:
      g_logger.error("Test Failure!!!")
    else:
      g_logger.debug("All tests passed")
    return exitStatus
  except:
    g_logger.exception("Unknown error occurred while running unit tests")
    raise
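A minimal usage sketch for the function above (the surrounding driver code and the JSON path are assumptions for illustration, not part of the original pipeline script):

import sys

# Standalone run: calling main() with no arguments falls back to an empty
# dict, exactly as the docstring above describes.
# Driver-script run: pass the pipeline JSON path and a log level.
exitStatus = main({"pipelineJson": "/path/to/pipeline.json",
                   "logLevel": "DEBUG"})
sys.exit(exitStatus)  # main() returns 0 when all tests pass, 1 on failure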
Example #2
def main(jsonArgs):
  """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

    :raises NupicBuildFailed if build fails, or a Generic Exception in all
    other cases.

    :param jsonArgs: dict of  pipeline-json and logLevel
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}
  """
  try:
    pipelineConfig = addAndParseArgs(jsonArgs)

    grokUser = getGithubUserName(pipelineConfig["grokRemote"])
    nupicUser = getGithubUserName(pipelineConfig["nupicRemote"])
    amiName = (grokUser + "-" + pipelineConfig["grokBranch"])
    env = prepareEnv(pipelineConfig["buildWorkspace"], None, os.environ)

    preBuildSetup(env, pipelineConfig)

    builder.buildGrok(env, pipelineConfig, g_logger)
    g_logger.debug("Grok built successfully!")

    deployTrack = getDeployTrack(pipelineConfig["grokRemote"],
                                 pipelineConfig["nupicRemote"],
                                 pipelineConfig["grokBranch"],
                                 pipelineConfig["nupicBranch"])

    pipelineConfig["pipelineParams"]["build"] = {
                              "grokSha": pipelineConfig["grokSha"],
                              "nupicSha": pipelineConfig["nupicSha"],
                              "grokHome": env["GROK_HOME"],
                              "nupicBuildDir": env["NUPIC"].rpartition("/")[0],
                              "deployTrack": deployTrack,
                              "grokDeployTrack": grokUser,
                              "nupicDeployTrack": nupicUser,
                              "amiName": amiName
                            }
    g_logger.debug(pipelineConfig["pipelineParams"])
    if pipelineConfig["pipelineJson"]:
      with open(pipelineConfig["pipelineJson"], 'w') as jsonFile:
        jsonFile.write(json.dumps(pipelineConfig["pipelineParams"],
                       ensure_ascii=False))
  except NupicBuildFailed:
    g_logger.exception("NuPIC building failed")
    raise
  except Exception:
    g_logger.exception("Unknown error occurred in build phase")
    raise
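For reference, the "build" entry that this variant writes into the pipeline JSON has the shape sketched below; every value is a placeholder inferred from the assignments above, not real pipeline output:

# Placeholder sketch of pipelineConfig["pipelineParams"]["build"] as written
# to pipelineJson by the code above; the angle-bracket strings stand in for
# real SHAs, paths, and GitHub user names.
buildEntry = {
    "grokSha": "<GROK_SHA>",
    "nupicSha": "<NUPIC_SHA>",
    "grokHome": "<GROK_HOME>",
    "nupicBuildDir": "<NUPIC_CHECKOUT_PARENT_DIR>",
    "deployTrack": "<DEPLOY_TRACK>",
    "grokDeployTrack": "<GROK_GITHUB_USER>",
    "nupicDeployTrack": "<NUPIC_GITHUB_USER>",
    "amiName": "<GROK_GITHUB_USER>-<GROK_BRANCH>",
}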
Example #3
def main(jsonArgs=None):
    """
    This is the Main fuction, which creates YOMP rpms and
    writes the status to the json file if it is json driven.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :raises: raises generic Exception if anything else goes wrong.
  """
    jsonArgs = jsonArgs or {}
    (buildWorkspace, YOMPSha, releaseVersion, pipelineParams, pipelineJson,
     YOMPRemote) = addAndParseArgs(jsonArgs)
    try:
        # TODO: TAUR-841: Use an IAM role on the Jenkins instances instead of
        # embedding the AWS keypair in the repo. Boto will take care of either
        # loading the key from the environment or from the IAM role automagically.
        if not (os.environ.get("AWS_ACCESS_KEY_ID")
                and os.environ.get("AWS_SECRET_ACCESS_KEY")):
            g_logger.error(
                "Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
            raise exceptions.MissingAWSKeysInEnvironment(
                "AWS keys are not set")

        env = prepareEnv(buildWorkspace, None, os.environ)
        artifactsDir = jenkins.createOrReplaceArtifactsDir()
        syncRpm, rpmNameDetails = buildRpms(env, YOMPSha, releaseVersion,
                                            artifactsDir, g_logger, g_config,
                                            YOMPRemote)
        packageRpm = {
            "syncRpm": syncRpm,
            "YOMPRpmName": rpmNameDetails["YOMP"],
            "repoName": "x86_64"
        }
        pipelineParams["packageRpm"] = packageRpm
        g_logger.debug(pipelineParams)
        if pipelineJson:
            with open(pipelineJson, 'w') as fp:
                fp.write(json.dumps(pipelineParams, ensure_ascii=False))
    except Exception:
        g_logger.exception("Unknown error occurred in pack RPM phase")
        raise
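Because this phase aborts unless AWS credentials are present, a standalone invocation would export them first. A hedged sketch, in which the credential values and JSON path are placeholders (and, per the TODO above, an IAM role is the preferred mechanism):

import os

# Placeholder credentials for illustration only; never hard-code real keys.
os.environ.setdefault("AWS_ACCESS_KEY_ID", "<ACCESS_KEY_ID>")
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "<SECRET_ACCESS_KEY>")

# A standalone run falls back to an empty dict; a driver script passes the
# pipeline JSON path and log level instead.
main({"pipelineJson": "/path/to/pipeline.json", "logLevel": "DEBUG"})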
Example #4
File: build.py  Project: bopopescu/what
def main(jsonArgs):
    """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

    :param jsonArgs: dict of  pipeline-json and logLevel
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}
  """
    try:
        pipelineConfig = addAndParseArgs(jsonArgs)

        YOMPUser = getGithubUserName(pipelineConfig["YOMPRemote"])
        amiName = (YOMPUser + "-" + pipelineConfig["YOMPBranch"])
        env = prepareEnv(pipelineConfig["buildWorkspace"], None, os.environ)

        preBuildSetup(env, pipelineConfig)

        builder.buildYOMP(env, pipelineConfig, g_logger)
        g_logger.debug("YOMP built successfully!")

        deployTrack = getDeployTrack(pipelineConfig["YOMPRemote"],
                                     pipelineConfig["YOMPBranch"])

        pipelineConfig["pipelineParams"]["build"] = {
            "YOMPSha": pipelineConfig["YOMPSha"],
            "YOMPHome": env["YOMP_HOME"],
            "deployTrack": deployTrack,
            "YOMPDeployTrack": YOMPUser,
            "amiName": amiName
        }
        g_logger.debug(pipelineConfig["pipelineParams"])
        if pipelineConfig["pipelineJson"]:
            with open(pipelineConfig["pipelineJson"], 'w') as jsonFile:
                jsonFile.write(
                    json.dumps(pipelineConfig["pipelineParams"],
                               ensure_ascii=False))
    except Exception:
        g_logger.exception("Unknown error occurred in build phase")
        raise
Example #5
def main(jsonArgs):
  """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

    :param jsonArgs: dict of  pipeline-json and logLevel
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}
  """
  try:
    pipelineConfig = addAndParseArgs(jsonArgs)

    htmItUser = getGithubUserName(pipelineConfig["htm-itRemote"])
    amiName = (htmItUser + "-" + pipelineConfig["htm-itBranch"])
    env = prepareEnv(pipelineConfig["buildWorkspace"], None, os.environ)

    preBuildSetup(env, pipelineConfig)

    builder.buildHtmIt(env, pipelineConfig, g_logger)
    g_logger.debug("HTM-IT built successfully!")

    deployTrack = getDeployTrack(pipelineConfig["htm-itRemote"],
                                 pipelineConfig["htm-itBranch"])

    pipelineConfig["pipelineParams"]["build"] = {
                              "htm-itSha": pipelineConfig["htm-itSha"],
                              "htm-itHome": env["HTM-IT_HOME"],
                              "deployTrack": deployTrack,
                              "htm-itDeployTrack": htm-itUser,
                              "amiName": amiName
                            }
    g_logger.debug(pipelineConfig["pipelineParams"])
    if pipelineConfig["pipelineJson"]:
      with open(pipelineConfig["pipelineJson"], 'w') as jsonFile:
        jsonFile.write(json.dumps(pipelineConfig["pipelineParams"],
                       ensure_ascii=False))
  except Exception:
    g_logger.exception("Unknown error occurred in build phase")
    raise
Example #6
def main(jsonArgs=None):
  """
    This is the Main fuction, which creates HTM-IT rpms and
    writes the status to the json file if it is json driven.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :raises: raises generic Exception if anything else goes wrong.
  """
  jsonArgs = jsonArgs or {}
  (buildWorkspace, htmItSha, releaseVersion,
   pipelineParams, pipelineJson, htmItRemote) = addAndParseArgs(jsonArgs)
  try:
    # TODO: TAUR-841: Use an IAM role on the Jenkins instances instead of
    # embedding the AWS keypair in the repo. Boto will take care of either
    # loading the key from the environment or from the IAM role automagically.
    if not (os.environ.get("AWS_ACCESS_KEY_ID") and
            os.environ.get("AWS_SECRET_ACCESS_KEY")):
      g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
      raise exceptions.MissingAWSKeysInEnvironment("AWS keys are not set")

    env = prepareEnv(buildWorkspace, None, os.environ)
    artifactsDir = jenkins.createOrReplaceArtifactsDir(logger=g_logger)
    syncRpm, rpmNameDetails = buildRpms(env, htmItSha,
                                        releaseVersion, artifactsDir,
                                        g_logger, g_config, htmItRemote)
    packageRpm = {"syncRpm": syncRpm,
                  "htm-itRpmName": rpmNameDetails["htm-it"],
                  "repoName": "x86_64"}
    pipelineParams["packageRpm"] = packageRpm
    g_logger.debug(pipelineParams)
    if pipelineJson:
      with open(pipelineJson, 'w') as fp:
        fp.write(json.dumps(pipelineParams, ensure_ascii=False))
  except Exception:
    g_logger.exception("Unknown error occurred in pack RPM phase")
    raise
Example #7
def main(jsonArgs=None):
    """
    Main function.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson" : <PIPELINE_JSON_PATH>,
            "logLevel" : <LOG_LEVEL>}

  """
    jsonArgs = jsonArgs or {}
    testResult = False
    try:
        (pipeline, buildWorkspace, grokSha, pipelineParams,
         pipelineJson) = addAndParseArgs(jsonArgs)

        os.environ["BUILD_WORKSPACE"] = buildWorkspace
        env = prepareEnv(buildWorkspace, None, os.environ)

        # Tests are failing without LD_LIBRARY_PATH, HACK
        env.update(
            LD_LIBRARY_PATH="/opt/numenta/anaconda/lib:/usr/lib64:/usr/lib")

        testResult = runUnitTests(env, pipeline, grokSha, g_logger)
        # Write testResult to JSON file if JSON file driven run
        if pipelineJson:
            pipelineParams["test"] = {"testStatus": testResult}
            with open(pipelineJson, 'w') as fp:
                fp.write(json.dumps(pipelineParams, ensure_ascii=False))
            runWithOutput("cat %s" % pipelineJson)
        # In any case log success/failure to console and exit accordingly
        exitStatus = int(not testResult)
        if exitStatus:
            g_logger.error("Test Failure!!!")
        else:
            g_logger.debug("All tests passed")
        return exitStatus
    except:
        g_logger.exception("Unknown error occurred while running unit tests")
        raise