Code example #1
def main(args):
    """
    Main function for the pipeline. Executes all sub-tasks

    :param args: Parsed command line arguments
  """
    logger = initPipelineLogger("manifest", logLevel=args.logLevel)
    buildWorkspace = os.environ.get("BUILD_WORKSPACE", None)
    if not buildWorkspace:
        baseDir = jenkins.getWorkspace()
        buildId = jenkins.getBuildNumber()
        buildWorkspace = mkdtemp(prefix=buildId, dir=baseDir)

    manifest = vars(args)
    # Update buildWorkspace in manifest section for pipelineJson
    manifest.update({"buildWorkspace": buildWorkspace})
    manifestEnv = {"manifest": manifest}

    with open("%s/%s_pipeline.json" % (buildWorkspace, args.pipeline),
              'w') as fp:
        fp.write(json.dumps(manifestEnv, ensure_ascii=False))

    logger.debug(json.dumps(manifestEnv))
    pipelineJsonPath = "%s/%s_pipeline.json" % (buildWorkspace, args.pipeline)
    logger.info("Pipeline JSON path: %s", pipelineJsonPath)
    return pipelineJsonPath
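
For orientation: the file written above nests the parsed arguments, plus buildWorkspace, under a single "manifest" key. A minimal sketch of reading it back (the path is hypothetical; the real name is "<buildWorkspace>/<pipeline>_pipeline.json"):

import json

# Hypothetical path, following the naming scheme used above.
with open("/tmp/build/htm-it_pipeline.json") as fp:
    manifestEnv = json.load(fp)

manifest = manifestEnv["manifest"]  # the vars(args) dict written above
print(manifest["buildWorkspace"])   # e.g. "/tmp/build"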
Code example #2
File: manifest.py Project: dm1973/numenta-apps
def main(args):
  """
    Main function for the pipeline. Executes all sub-tasks

    :param dict args: Parsed command line arguments

    :returns: /path/to/pipelineJson
    :rtype: str
  """
  logger = initPipelineLogger("manifest", logLevel=args.logLevel)
  buildWorkspace = os.environ.get("BUILD_WORKSPACE",
                                  jenkins.defineBuildWorkspace(logger=logger))
  mkdirp(buildWorkspace)

  manifest = vars(args)
  # Update buildWorkspace in manifest section for pipelineJson
  manifest.update({"buildWorkspace": buildWorkspace})
  manifestEnv = {"manifest": manifest}

  with open("%s/%s_pipeline.json" % (buildWorkspace, args.pipeline), 'w') as fp:
    fp.write(json.dumps(manifestEnv, ensure_ascii=False))

  logger.debug(json.dumps(manifestEnv))
  pipelineJsonPath = "%s/%s_pipeline.json" % (buildWorkspace, args.pipeline)
  logger.info("Pipeline JSON path: %s", pipelineJsonPath)
  return pipelineJsonPath
Code example #3
def main(args):
    """
    Main function for the pipeline. Executes all sub-tasks

    :param dict args: Parsed command line arguments

    :returns: /path/to/pipelineJson
    :rtype: str
  """
    logger = initPipelineLogger("manifest", logLevel=args.logLevel)
    buildWorkspace = os.environ.get(
        "BUILD_WORKSPACE", jenkins.defineBuildWorkspace(logger=logger))
    mkdirp(buildWorkspace)

    manifest = vars(args)
    # Update buildWorkspace in manifest section for pipelineJson
    manifest.update({"buildWorkspace": buildWorkspace})
    manifestEnv = {"manifest": manifest}

    with open("%s/%s_pipeline.json" % (buildWorkspace, args.pipeline),
              'w') as fp:
        fp.write(json.dumps(manifestEnv, ensure_ascii=False))

    logger.debug(json.dumps(manifestEnv))
    pipelineJsonPath = "%s/%s_pipeline.json" % (buildWorkspace, args.pipeline)
    logger.info("Pipeline JSON path: %s", pipelineJsonPath)
    return pipelineJsonPath
Code example #4
def main(args):
  """
    Main function for the pipeline. Executes all sub-tasks

    :param args: Parsed command line arguments
  """
  logger = initPipelineLogger("manifest", logLevel=args.logLevel)
  buildWorkspace = os.environ.get("BUILD_WORKSPACE", None)
  if not buildWorkspace:
    baseDir = jenkins.getWorkspace()
    buildId = jenkins.getBuildNumber()
    buildWorkspace = mkdtemp(prefix=buildId, dir=baseDir)

  manifest = vars(args)
  # Update buildWorkspace in manifest section for pipelineJson
  manifest.update({"buildWorkspace": buildWorkspace})
  manifestEnv = {"manifest": manifest}

  with open("%s/%s_pipeline.json" % (buildWorkspace, args.pipeline), 'w') as fp:
    fp.write(json.dumps(manifestEnv, ensure_ascii=False))

  logger.debug(json.dumps(manifestEnv))
  pipelineJsonPath = "%s/%s_pipeline.json" % (buildWorkspace, args.pipeline)
  logger.info("Pipeline JSON path: %s", pipelineJsonPath)
  return pipelineJsonPath
Code example #5
def main(args):
  """
  This function terminates stale instances running in the regions
  passed as parameters to the script.
  Instances which satisfy the following conditions will be terminated:
  - Name of the instance starts with 'vertis_'. Instances starting with
    'vertis_donotremove_' will not be terminated.
  - Instances which are running for more than three hours, and have blank
    'Name' tag.
  """

  logger = initPipelineLogger("janitor_ec2", logLevel=args.logLevel)

  awsAccessKeyId = os.environ.get("AWS_ACCESS_KEY_ID")
  awsSecretAccessKey = os.environ.get("AWS_SECRET_ACCESS_KEY")

  if not (awsAccessKeyId and awsSecretAccessKey):
    logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
    raise MissingAWSKeysInEnvironment("AWS keys are not set")

  # required for terminateInstance function
  config = {}
  config["AWS_ACCESS_KEY_ID"] = awsAccessKeyId
  config["AWS_SECRET_ACCESS_KEY"] = awsSecretAccessKey
  for region in args.region:
    instances = [i.id for i in getInstances(region, awsAccessKeyId,
                                            awsSecretAccessKey, logger)
                 if toBeTerminated(i, logger)]
    if instances:
      config["REGION"] = region
      logger.info("Deleting {}".format(", ".join(instances)))
      for instance in instances:
        terminateInstance(instance, config, logger)
    else:
      logger.info("None of the instances are stale.")
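
The helper toBeTerminated is not shown on this page. A plausible sketch that follows the rules in the docstring above, assuming boto-style instance objects with a tags dict and an ISO-8601 launch_time string (names and formats here are assumptions, not the project's actual code):

from datetime import datetime, timedelta

def toBeTerminated(instance, logger):
  # Sketch only: never reap 'vertis_donotremove_' instances; always reap
  # other 'vertis_' instances.
  name = instance.tags.get("Name", "")
  if name.startswith("vertis_donotremove_"):
    return False
  if name.startswith("vertis_"):
    return True
  # Blank 'Name' tag and running for more than three hours.
  launchTime = datetime.strptime(instance.launch_time,
                                 "%Y-%m-%dT%H:%M:%S.%fZ")
  if not name and datetime.utcnow() - launchTime > timedelta(hours=3):
    logger.info("Unnamed instance %s is stale", instance.id)
    return True
  return False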
Code example #6
File: numenta_rpm.py Project: darian19/numenta-apps
 def __init__(self, config):
   # convert dict to object
   if isinstance(config, dict):
     tmpConfig = type('Config', (), {})()
     for k, v in config.items():
       setattr(tmpConfig, k, v)
     config = tmpConfig
   failmsg = None
   if config.sitePackagesTarball:
     if config.flavor != "htm-it":
       failmsg = "--site-packages is only used for htm-it packages."
   if config.flavor is None:
     failmsg = "You must set a type of rpm to create with --rpm-flavor"
   if config.artifacts == []:
     failmsg = "You must specify artifacts in the fakeroot to package."
     if config.flavor == "htm-it":
       failmsg = failmsg + " HTM-IT rpms should specify opt"
     if config.flavor == "infrastructure":
       failmsg = failmsg + " Infrastructure rpms should specify opt"
     if config.flavor == "saltcellar":
       failmsg = failmsg + " Saltcellar rpms should specify srv"
   if failmsg:
     raise InvalidParametersError(failmsg)
   self.config = config
   self.environment = dict(os.environ)
   self.fakeroot = None
   self.logger = diagnostics.initPipelineLogger(name="create-numenta-rpm",
                                                logLevel=config.logLevel)
   self.productsDirectory = None
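
On Python 3, the dict-to-object shim at the top of this constructor can be written with types.SimpleNamespace; a sketch of the equivalent conversion (an alternative, not the project's code):

from types import SimpleNamespace

# Equivalent of the type('Config', (), {})() shim above: dict keys become
# attributes, so config.flavor, config.artifacts, etc. keep working.
if isinstance(config, dict):
  config = SimpleNamespace(**config)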
Code example #7
File: numenta_rpm.py Project: codeaudit/numenta-apps
 def __init__(self, config):
     # convert dict to object
     if isinstance(config, dict):
         tmpConfig = type('Config', (), {})()
         for k, v in config.items():
             setattr(tmpConfig, k, v)
         config = tmpConfig
     failmsg = None
     if config.sitePackagesTarball:
         if config.flavor != "grok":
             failmsg = "--site-packages is only used for grok packages."
     if config.flavor is None:
         failmsg = "You must set a type of rpm to create with --rpm-flavor"
     if config.artifacts == []:
         failmsg = "You must specify artifacts in the fakeroot to package."
         if config.flavor == "grok":
             failmsg = failmsg + " Grok rpms should specify opt"
         if config.flavor == "infrastructure":
             failmsg = failmsg + " Infrastructure rpms should specify opt"
         if config.flavor == "saltcellar":
             failmsg = failmsg + " Saltcellar rpms should specify srv"
     if failmsg:
         raise InvalidParametersError(failmsg)
     self.config = config
     self.environment = dict(os.environ)
     self.fakeroot = None
     self.logger = diagnostics.initPipelineLogger(name="create-numenta-rpm",
                                                  logLevel=config.logLevel)
     self.productsDirectory = None
Code example #8
File: run_unit_tests.py Project: dm1973/numenta-apps
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments.

    :returns: buildWorkspace, pipelineParams, pipelineJson
  """
  parser = argparse.ArgumentParser(description="test tool to run Test for "
                                   "Grok. Provide parameters either "
                                   "via path for JSON file or commandline. "
                                   "Provinding both JSON parameter and as a "
                                   "commandline is prohibited. "
                                   "Use help for detailed information for "
                                   "parameters")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      default=os.environ.get("BUILD_WORKSPACE"),
                      help="Common dir prefix for grok")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="Path locator for build json file. This file should "
                      "have all parameters required by this script. Provide "
                      "parameters either as a command line parameters or as "
                      "individial parameters")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, optional parameter and defaulted to "
                      "level warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = initPipelineLogger("run_tests", logLevel=args["logLevel"])

  g_logger.debug(args)
  saneParams = {k:v for k, v in args.items() if v is not None}

  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    parser.error("Please provide parameters via JSON file or commandline,"
                   "but not both")

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  if saneParams.get("buildWorkspace"):
    buildWorkspace = saneParams.get("buildWorkspace")
  else:
    buildWorkspace = pipelineParams.get("manifest", {}).get("buildWorkspace")

  if buildWorkspace and pipelineParams:
    return (buildWorkspace, pipelineParams, args["pipelineJson"])
  else:
    parser.error("Please provide all parameters, "
                 "use --help for further details")
Code example #9
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments.

    :returns: buildWorkspace, pipelineParams, pipelineJson
  """
  parser = argparse.ArgumentParser(description="test tool to run Test for "
                                   "HTM-IT. Provide parameters either "
                                   "via path for JSON file or commandline. "
                                   "Provinding both JSON parameter and as a "
                                   "commandline is prohibited. "
                                   "Use help for detailed information for "
                                   "parameters")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      default=os.environ.get("BUILD_WORKSPACE"),
                      help="Common dir prefix for htm-it")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="Path locator for build json file. This file should "
                      "have all parameters required by this script. Provide "
                      "parameters either as a command line parameters or as "
                      "individial parameters")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, optional parameter and defaulted to "
                      "level warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = initPipelineLogger("run_tests", logLevel=args["logLevel"])

  g_logger.debug(args)
  saneParams = {k:v for k, v in args.items() if v is not None}

  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    parser.error("Please provide parameters via JSON file or commandline,"
                   "but not both")

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  if saneParams.get("buildWorkspace"):
    buildWorkspace = saneParams.get("buildWorkspace")
  else:
    buildWorkspace = pipelineParams.get("manifest", {}).get("buildWorkspace")

  if buildWorkspace and pipelineParams:
    return (buildWorkspace, pipelineParams, args["pipelineJson"])
  else:
    parser.error("Please provide all parameters, "
                 "use --help for further details")
Code example #10
File: build.py Project: darian19/numenta-apps
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments or a JSON blob containing the required
    values.

    :returns: A dict of the parameters needed, as follows:
      {
        "buildWorkspace": "/path/to/build/in",
        "htm-itRemote": "[email protected]:Numenta/numenta-apps.git",
        "htm-itBranch": "master",
        "htm-itSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file"
      }

    :rtype: dict

    :raises parser.error: if a wrong combination of arguments is given or
      arguments are missing.
  """
  parser = argparse.ArgumentParser(description="build tool for HTM-IT")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="The JSON file generated by manifest tool.")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for HTM-IT")
  parser.add_argument("--htm-it-remote", dest="htm-itRemote", type=str,
                      help="The htm-it remote you want to use, e.g.,  "
                           "[email protected]:Numenta/numenta-apps.git")
  parser.add_argument("--htm-it-sha", dest="htm-itSha", type=str,
                      help="HTM-IT SHA that will be built")
  parser.add_argument("--htm-it-branch", dest="htm-itBranch", type=str,
                      help="The branch you are building from")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                      help="Current release version, this will be used as base"
                           "version for htm-it and tracking rpm")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, by default it takes warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = diagnostics.initPipelineLogger(name="build",
                                            logLevel=args["logLevel"])
  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    errorMessage = "Please provide parameters via JSON file or commandline"
    parser.error(errorMessage)

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  # Setup defaults
  pipelineConfig = {
    "buildWorkspace": None,
    "htm-itRemote": "[email protected]:Numenta/numenta-apps.git",
    "htm-itBranch": "master",
    "htm-itSha": "HEAD",
    "pipelineParams": pipelineParams,
    "pipelineJson": None
  }

  pipelineConfig["buildWorkspace"] = os.environ.get("BUILD_WORKSPACE",
                    pipelineParams.get("buildWorkspace",
                      pipelineParams.get("manifest", {}).get("buildWorkspace")))
  if not pipelineConfig["buildWorkspace"]:
    parser.error("You must set a BUILD_WORKSPACE environment variable "
                 "or pass the --build-workspace argument via the command line "
                 "or json file.")

  pipelineConfig["htm-itRemote"] = pipelineParams.get("htm-itRemote",
                          pipelineParams.get("manifest", {}).get("htm-itRemote"))
  pipelineConfig["htm-itBranch"] = pipelineParams.get("htm-itBranch",
                          pipelineParams.get("manifest", {}).get("htm-itBranch"))
  pipelineConfig["htm-itSha"] = pipelineParams.get("htm-itSha",
                          pipelineParams.get("manifest", {}).get("htm-itSha"))

  pipelineConfig["pipelineJson"] = args["pipelineJson"]

  return pipelineConfig
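
The three-level fallback above (environment variable, then top-level parameter, then the "manifest" section of the pipeline JSON) recurs throughout these examples. A small illustrative helper makes the precedence explicit; lookupParam is hypothetical, not part of the project:

import os

def lookupParam(pipelineParams, key, section="manifest", envVar=None):
  # Precedence: environment variable (if named), then the top-level key,
  # then the same key inside the given section of the pipeline JSON.
  if envVar and os.environ.get(envVar):
    return os.environ[envVar]
  return pipelineParams.get(key,
                            pipelineParams.get(section, {}).get(key))

# e.g. buildWorkspace = lookupParam(pipelineParams, "buildWorkspace",
#                                   envVar="BUILD_WORKSPACE")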
Code example #11
File: bake_ami.py Project: darian19/numenta-apps
def addAndParseArgs(jsonArgs):
  """
    Parse, sanitize and process command line arguments.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :returns: A dict containing releaseVersion, buildWorkspace, htmItSha,
      deployTrack, htmItDeployTrack, amiName (all strings).

    Example dict:
    {
      "releaseVersion": "1.7.0",
      "buildWorkspace": "/path/to/Workspace",
      "htmItSha": "0xDEADBEEF",
      "deployTrack": "production",
      "htmItDeployTrack": "production",
      "amiName": "htm-it-pipeline"
    }

    :rtype: dict of strings

    :raises MissingCommandLineArgument: when expected parameters are missing
    :raises ConflictingCommandLineArguments: when pipelineJson and other
      conflicting parameters are set
  """
  parser = argparse.ArgumentParser(description="Tool to bake AMI with "
                                   "given version of HTM-IT")
  parser.add_argument("--htm-it-sha", dest="htm-itSha", type=str,
                      help="SHA from HTM-IT used for this build")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                      help="Current release version, this will be used as base")
  parser.add_argument("--htm-it-deploy-track", dest="htm-itDeployTrack", type=str,
                      help="Deploy track for htm-it RPM")
  parser.add_argument("--ami-name", dest="amiName", type=str,
                      help="Descriptive key to be used with auto generated ami"
                      "name")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for htm-it")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="Path locator for build json file. This file should"
                      "have all parameters required by this script. Provide"
                      "parameters either as a command line parameters or as"
                      "individial parameters")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, optional parameter and defaulted to"
                      "level warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = initPipelineLogger("bake_ami", logLevel=args["logLevel"])
  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    errorMessage = "Please provide parameters via JSON file or commandline"
    g_logger.error(errorMessage)
    parser.error(errorMessage)

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  releaseVersion = pipelineParams.get("releaseVersion",
                     pipelineParams.get("manifest", {}).get("releaseVersion"))

  buildWorkspace = os.environ.get("BUILD_WORKSPACE",
                     pipelineParams.get("buildWorkspace",
                     pipelineParams.get("manifest", {}).get("buildWorkspace")))

  htmItSha = pipelineParams.get("htmItSha",
               pipelineParams.get("build", {}).get("htmItSha"))
  amiName = pipelineParams.get("amiName",
              pipelineParams.get("build", {}).get("amiName"))
  pipelineJson = args["pipelineJson"]
  if releaseVersion and buildWorkspace and htmItSha and amiName:
    return {"releaseVersion": releaseVersion,
            "buildWorkspace": buildWorkspace,
            "htmItSha": htmItSha,
            "amiName": amiName,
            "pipelineJson": pipelineJson}
  else:
    parser.error("Please provide all parameters, "
                 "Use --help for further details")
Code example #12
def addAndParseArgs(jsonArgs):
    """
    Parse, sanitize and process command line arguments.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :returns: A dict containing releaseVersion, buildWorkspace, htmItSha,
    deployTrack, htmItDeployTrack, amiName (all strings).

    Example dict:
    {
      "releaseVersion": "1.7.0,
      "buildWorkspace": "/path/to/Workspace",
      "htmItSha": "0xDEADBEEF",
      "deployTrack": "production",
      "htmItDeployTrack": "production",
      "amiName": "htm-it-pipeline"
    }

    :rtype: dict of strings

    :raises MissingCommandLineArgument: when expected parameters are missing
    :raises ConflictingCommandLineArguments: when pipelineJson and other
      conflicting parameters are set
  """
    parser = argparse.ArgumentParser(description="Tool to bake AMI with "
                                     "given version of HTM-IT")
    parser.add_argument("--htm-it-sha",
                        dest="htmItSha",
                        type=str,
                        help="SHA from HTM-IT used for this build")
    parser.add_argument(
        "--release-version",
        dest="releaseVersion",
        type=str,
        help="Current release version, this will be used as base")
    parser.add_argument("--htm-it-deploy-track",
                        dest="htmItDeployTrack",
                        type=str,
                        help="Deploy track for htm-it RPM")
    parser.add_argument(
        "--ami-name",
        dest="amiName",
        type=str,
        help="Descriptive key to be used with auto generated ami"
        "name")
    parser.add_argument("--build-workspace",
                        dest="buildWorkspace",
                        type=str,
                        help="Common dir prefix for htm-it")
    parser.add_argument(
        "--pipeline-json",
        dest="pipelineJson",
        type=str,
        help="Path locator for build json file. This file should"
        "have all parameters required by this script. Provide"
        "parameters either as a command line parameters or as"
        "individial parameters")
    parser.add_argument(
        "--log",
        dest="logLevel",
        type=str,
        default="warning",
        help="Logging level, optional parameter and defaulted to"
        "level warning")

    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())

    global g_logger
    g_logger = initPipelineLogger("bake_ami", logLevel=args["logLevel"])
    saneParams = {k: v for k, v in args.items() if v is not None}
    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        errorMessage = "Please provide parameters via JSON file or commandline"
        g_logger.error(errorMessage)
        parser.error(errorMessage)

    if "pipelineJson" in saneParams:
        with open(args["pipelineJson"]) as paramFile:
            pipelineParams = json.load(paramFile)
    else:
        pipelineParams = saneParams

    releaseVersion = pipelineParams.get(
        "releaseVersion",
        pipelineParams.get("manifest", {}).get("releaseVersion"))

    buildWorkspace = os.environ.get(
        "BUILD_WORKSPACE",
        pipelineParams.get(
            "buildWorkspace",
            pipelineParams.get("manifest", {}).get("buildWorkspace")))

    htmItSha = pipelineParams.get(
        "htmItSha",
        pipelineParams.get("build", {}).get("htmItSha"))
    amiName = pipelineParams.get(
        "amiName",
        pipelineParams.get("build", {}).get("amiName"))
    pipelineJson = args["pipelineJson"]
    if releaseVersion and buildWorkspace and htmItSha and amiName:
        return {
            "releaseVersion": releaseVersion,
            "buildWorkspace": buildWorkspace,
            "htmItSha": htmItSha,
            "amiName": amiName,
            "pipelineJson": pipelineJson
        }
    else:
        parser.error("Please provide all parameters, "
                     "Use --help for further details")
Code example #13
def addAndParseArgs(jsonArgs):
    """
  This method parses the command line paramaters passed to the script.

  :returns: logger, buildWorkspace, grokSha, releaseVersion,
            pipelineParams, pipelineJson
  """

    parser = argparse.ArgumentParser(description="Package tool for creating"
                                     " grok rpms")
    parser.add_argument("--pipeline-json",
                        dest="pipelineJson",
                        type=str,
                        help="The manifest file name")
    parser.add_argument("--build-workspace",
                        dest="buildWorkspace",
                        type=str,
                        help="Common dir prefix for grok")
    parser.add_argument("--grokSha",
                        dest="grokSha",
                        type=str,
                        help="The grokSha for which are creating rpm")
    parser.add_argument("--grok-remote",
                        dest="grokRemote",
                        type=str,
                        help="The grok remote you want to use, "
                        "e.g. [email protected]:Numenta/numenta-apps.git")
    parser.add_argument("--unit-test-status",
                        dest="testStatus",
                        type=str,
                        help="Unit test success status")
    parser.add_argument(
        "--release-version",
        dest="releaseVersion",
        type=str,
        help="Current release version, this will be used as base"
        "version for grok and tracking rpm")
    parser.add_argument("--log",
                        dest="logLevel",
                        type=str,
                        default="warning",
                        help="Logging level")

    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())
    global g_logger

    g_logger = log.initPipelineLogger("packaging", logLevel=args["logLevel"])

    saneParams = {k: v for k, v in args.items() if v is not None}
    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        parser.error("Please provide parameters via JSON file or commandline,"
                     "but not both")

    if "pipelineJson" in saneParams:
        with open(args["pipelineJson"]) as paramFile:
            pipelineParams = json.load(paramFile)
    else:
        pipelineParams = saneParams

    g_logger.info("pipeline parameters:%s", pipelineParams)

    buildWorkspace = os.environ.get(
        "BUILD_WORKSPACE",
        pipelineParams.get(
            "buildWorkspace",
            pipelineParams.get("manifest", {}).get("buildWorkspace")))
    grokSha = pipelineParams.get(
        "grokSha",
        pipelineParams.get("build", {}).get("grokSha"))
    unitTestStatus = pipelineParams.get(
        "testStatus",
        pipelineParams.get("test", {}).get("testStatus"))
    releaseVersion = pipelineParams.get(
        "releaseVersion",
        pipelineParams.get("manifest", {}).get("releaseVersion"))
    grokRemote = pipelineParams.get(
        "grokRemote",
        pipelineParams.get("manifest", {}).get("grokRemote"))

    if platform.system() != "Linux":
        g_logger.error(
            "RPMs will be built only on Linux (CentOS). Bailing out.")
        raise exceptions.FailedToCreateRPMOnNonLinuxBox("RPMs will not build")

    if not unitTestStatus:
        g_logger.error("Unit tests failed. RPMs will not be created.")
        raise exceptions.UnittestFailed("Unit tests failed")

    if buildWorkspace and grokSha and grokRemote:
        return (buildWorkspace, grokSha, releaseVersion, pipelineParams,
                args["pipelineJson"], grokRemote)
    else:
        parser.error("Please provide all parameters, "
                     "Use --help for further details")
Code example #14
File: package.py Project: darian19/numenta-apps
def addAndParseArgs(jsonArgs):
  """
  This method parses the command line parameters passed to the script.

  :returns: buildWorkspace, htmItSha, releaseVersion, pipelineParams,
            pipelineJson, htmItRemote
  """

  parser = argparse.ArgumentParser(description="Package tool for creating"
                                               " htm-it rpms")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="The manifest file name")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for htm-it")
  parser.add_argument("--htm-itSha", dest="htm-itSha", type=str,
                      help="The htm-itSha for which are creating rpm")
  parser.add_argument("--htm-it-remote", dest="htm-itRemote", type=str,
                      help="The htm-it remote you want to use, "
                           "e.g. [email protected]:Numenta/numenta-apps.git")
  parser.add_argument("--unit-test-status", dest="testStatus", type=str,
                      help="Unit test success status")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                       help="Current release version, this will be used as base"
                       "version for htm-it and tracking rpm")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())
  global g_logger

  g_logger = log.initPipelineLogger("packaging", logLevel=args["logLevel"])

  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    parser.error("Please provide parameters via JSON file or commandline,"
                   "but not both")

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  g_logger.info("pipeline parameters:%s", pipelineParams)

  buildWorkspace = os.environ.get("BUILD_WORKSPACE",
                     pipelineParams.get("buildWorkspace",
                     pipelineParams.get("manifest", {}).get("buildWorkspace")))
  htmItSha = pipelineParams.get("htmItSha",
               pipelineParams.get("build", {}).get("htmItSha"))
  unitTestStatus = pipelineParams.get("testStatus",
                     pipelineParams.get("test", {}).get("testStatus"))
  releaseVersion = pipelineParams.get("releaseVersion",
                     pipelineParams.get("manifest", {}).get("releaseVersion"))
  htmItRemote = pipelineParams.get("htmItRemote",
                  pipelineParams.get("manifest", {}).get("htmItRemote"))

  if platform.system() != "Linux":
    g_logger.error("RPMs will be built only on Linux (CentOS). Bailing out.")
    raise exceptions.FailedToCreateRPMOnNonLinuxBox("RPMs will not build")

  if not unitTestStatus:
    g_logger.error("Unit tests failed. RPMs will not be created.")
    raise exceptions.UnittestFailed("Unit tests failed")

  if buildWorkspace and htmItSha and htmItRemote:
    return (buildWorkspace, htmItSha, releaseVersion,
            pipelineParams, args["pipelineJson"], htmItRemote)
  else:
    parser.error("Please provide all parameters; "
                 "use --help for further details")
Code example #15
File: rpm-creator.py Project: dm1973/numenta-apps
def parseArgs():
  """
    Parse the command line arguments

    :return: Parsed arguments
    :rtype: argparse.Namespace
  """
  parser = argparse.ArgumentParser(description="RPM Creator")
  parser.add_argument("--source-dir", action="store", dest="source_dir")
  parser.add_argument("--git-url",
                      action="store",
                      dest="gitURL",
                      help="git repository to package")
  parser.add_argument("--depends",
                      action="store",
                      dest="depends",
                      help="comma separated dependency list",
                      default=None)
  parser.add_argument("--package-name",
                      action="store",
                      dest="package_name",
                      help="rpm package name - don't include SHA or version")
  parser.add_argument("--repo-directory",
                      action="store",
                      dest="repo_directory",
                      help="name you want repo checked out as")
  parser.add_argument("--install-directory",
                      action="store",
                      dest="install_directory",
                      default='/opt',
                      help="where to install on target systems - default /opt")
  parser.add_argument("--sha", action="store", dest="sha", default=None)
  parser.add_argument("--base-version",
                      action="store",
                      dest="base_version",
                      default="0.1")
  parser.add_argument("--debug", action="store", dest="debug", default=0)
  parser.add_argument("--epoch", action="store", dest="epoch", default=0)
  parser.add_argument("--no-clean",
                      action="store",
                      dest="no_clean",
                      default=None)
  parser.add_argument("--arch", action="store", dest="arch", default=None)
  parser.add_argument("--desc",
                      action="store",
                      nargs='+',
                      dest="desc",
                      default=None)
  parser.add_argument("--directory-purge-list",
                      action="store",
                      dest="directory_purge_list",
                      default=None)
  parser.add_argument("--timestamp", action="store", dest="timestamp")
  parser.add_argument("--use-git-tags", action="store", dest="useGitTag",
                      help="read version data from the repo's git tags")
  parser.add_argument("--release", action="store", dest="release", default=0)
  parser.add_argument("--rpm-directory",
                      action="store",
                      dest="rpm_directory",
                      help="directory to put output rpm in")
  parser.add_argument("--workdir", action="store", dest="work_dir",
                      default="/opt/numenta/scratch",
                      help="The directory you want fpm to do its work in, where"
                           "'work' is any filecopying, downloading, etc."
                           "Roughly any scratch space fpm needs to build "
                           "your package.")
  parser.add_argument("--after-install", action="store",
                      dest="after_install", default=None,
                      help="post install script after rpm is installed")
  parser.add_argument("--log", dest="logLevel", type=str, default="debug",
                      help="Logging level")
  args = parser.parse_args()

  global g_logger #pylint: disable=W0603
  # Initializing the logger
  g_logger = diagnostics.initPipelineLogger(name="rpm-creator",
                                            logLevel=args.logLevel)

  if (not args.gitURL) and (not args.source_dir):
    parser.error("You must specify a repo to clone with --clone-source, or a"
                   "source directory with --source-directory")
  if not args.install_directory:
    parser.error("Specify a directory to install the repo into with"
                 "--install-directory, e.g. /opt")
  if not args.repo_directory:
    parser.error("Please specify a base directory with --repo-directory,"
                 "e.g. grok")
  if not args.package_name:
    parser.error("You must specify a package name with --package-name")
  if not args.base_version and not args.useGitTag:
    parser.error("Either specify a base version or --use-git-tags to load"
                 "from the, repo git tags")
  if args.gitURL and args.source_dir:
    parser.error("--clone-source and --source-dir are incompatible "
                 "with each other")
  if args.useGitTag and args.source_dir:
    parser.error("--use-git-tags and --source-dir are incompatible "
                 "with each other")
  if args.timestamp:
    VERSION_TIMESTAMP = args.timestamp #pylint: disable=C0103,W0612
  else:
    VERSION_TIMESTAMP = time.strftime("%Y%m%d.%H.%M.%S") #pylint: disable=C0103

  return args
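
A sketch of driving parseArgs from another script (values hypothetical; assumes the module's own imports are in place). The argv below satisfies the validation rules above: a git URL without --source-dir, plus a package name and a repo directory:

import sys

sys.argv = ["rpm-creator.py",
            "--git-url", "git@github.com:Numenta/numenta-apps.git",
            "--package-name", "htm-it",
            "--repo-directory", "htm-it"]
args = parseArgs()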
Code example #16
def parseArgs():
    """
    Parse the command line arguments

    :return: Parsed arguments
    :rtype: argparse.Namespace
    """
    parser = argparse.ArgumentParser(description="RPM Creator")
    parser.add_argument("--source-dir", action="store", dest="source_dir")
    parser.add_argument("--git-url",
                        action="store",
                        dest="gitURL",
                        help="git repository to package")
    parser.add_argument("--depends",
                        action="store",
                        dest="depends",
                        help="comma separated dependency list",
                        default=None)
    parser.add_argument("--package-name",
                        action="store",
                        dest="package_name",
                        help="rpm package name - don't include SHA or version")
    parser.add_argument("--repo-directory",
                        action="store",
                        dest="repo_directory",
                        help="name you want repo checked out as")
    parser.add_argument(
        "--install-directory",
        action="store",
        dest="install_directory",
        default='/opt',
        help="where to install on target systems - default /opt")
    parser.add_argument("--sha", action="store", dest="sha", default=None)
    parser.add_argument("--base-version",
                        action="store",
                        dest="base_version",
                        default="0.1")
    parser.add_argument("--debug", action="store", dest="debug", default=0)
    parser.add_argument("--epoch", action="store", dest="epoch", default=0)
    parser.add_argument("--no-clean",
                        action="store",
                        dest="no_clean",
                        default=None)
    parser.add_argument("--arch", action="store", dest="arch", default=None)
    parser.add_argument("--desc",
                        action="store",
                        nargs='+',
                        dest="desc",
                        default=None)
    parser.add_argument("--directory-purge-list",
                        action="store",
                        dest="directory_purge_list",
                        default=None)
    parser.add_argument("--timestamp", action="store", dest="timestamp")
    parser.add_argument("--use-git-tags",
                        action="store",
                        dest="useGitTag",
                        help="read version data from the repo's git tags")
    parser.add_argument("--release", action="store", dest="release", default=0)
    parser.add_argument("--rpm-directory",
                        action="store",
                        dest="rpm_directory",
                        help="directory to put output rpm in")
    parser.add_argument(
        "--workdir",
        action="store",
        dest="work_dir",
        default="/opt/numenta/scratch",
        help="The directory you want fpm to do its work in, where"
        "'work' is any filecopying, downloading, etc."
        "Roughly any scratch space fpm needs to build "
        "your package.")
    parser.add_argument("--after-install",
                        action="store",
                        dest="after_install",
                        default=None,
                        help="post install script after rpm is installed")
    parser.add_argument("--log",
                        dest="logLevel",
                        type=str,
                        default="debug",
                        help="Logging level")
    args = parser.parse_args()

    global g_logger
    # Initializing the logger
    g_logger = log.initPipelineLogger("rpm-creator", logLevel=args.logLevel)

    if (not args.gitURL) and (not args.source_dir):
        parser.error(
            "You must specify a repo to clone with --git-url, or a "
            "source directory with --source-dir")
    if not args.install_directory:
        parser.error("Specify a directory to install the repo into with"
                     "--install-directory, e.g. /opt")
    if not args.repo_directory:
        parser.error("Please specify a base directory with --repo-directory,"
                     "e.g. grok")
    if not args.package_name:
        parser.error("You must specify a package name with --package-name")
    if not args.base_version and not args.useGitTag:
        parser.error("Either specify a base version or --use-git-tags to load"
                     "from the, repo git tags")
    if args.gitURL and args.source_dir:
        parser.error("--clone-source and --source-dir are incompatible "
                     "with each other")
    if args.useGitTag and args.source_dir:
        parser.error("--use-git-tags and --source-dir are incompatible "
                     "with each other")
    if args.timestamp:
        VERSION_TIMESTAMP = args.timestamp
    else:
        VERSION_TIMESTAMP = time.strftime("%Y%m%d.%H.%M.%S")

    return args
Code example #17
def main():
    """
    This is the main class.
  """
    args = parseArgs()

    global g_logger
    g_logger = initPipelineLogger("run-integration-tests",
                                  logLevel=args.logLevel)

    if not (os.environ.get("AWS_ACCESS_KEY_ID")
            and os.environ.get("AWS_SECRET_ACCESS_KEY")):
        g_logger.error(
            "Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
        raise MissingAWSKeysInEnvironment("AWS keys are not set")
    else:
        g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
        g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

    # Launch an instance with the given AMI
    publicDnsName, instanceId = launchInstance(args.ami, g_config, g_logger)

    resultsDir = prepareResultsDir()
    serverKey = os.path.join("~", ".ssh", g_config["KEY"] + ".pem")

    # The calls in this function are not signal-safe. However, the expectation is
    # that making them signal safe would be overly burdensome at this time. If
    # issues arise later, then we'll figure out what the right approach is at that
    # time.
    def handleSignalInterrupt(signal, _frame):
        g_logger.error("Received interrupt signal %s", signal)
        if instanceId:
            g_logger.error("Terminating instance %s", instanceId)
            terminateInstance(instanceId, g_config, g_logger)

    signal.signal(signal.SIGINT, handleSignalInterrupt)
    signal.signal(signal.SIGTERM, handleSignalInterrupt)

    with settings(host_string=publicDnsName,
                  key_filename=serverKey,
                  user=g_config["USER"],
                  connection_attempts=30,
                  warn_only=True):
        g_logger.info("Connected to %s using %s.pem", publicDnsName, serverKey)
        # Run Integration tests
        try:
            waitForGrokServerToBeReady(publicDnsName, serverKey,
                                       g_config["USER"], g_logger)
            getApiKey(instanceId, publicDnsName, g_config, g_logger)
            # TODO remove the exports; keeping them intact for now because some of the
            # integration tests use the ConfigAttributePatch which reads these values
            # from environment.
            runTestCommand = (
                "export AWS_ACCESS_KEY_ID=%s" % os.environ["AWS_ACCESS_KEY_ID"]
                + " && export AWS_SECRET_ACCESS_KEY=%s" %
                os.environ["AWS_SECRET_ACCESS_KEY"] +
                " && source /etc/grok/supervisord.vars" + " && cd $GROK_HOME" +
                " && ./run_tests.sh --integration --language py" +
                " --results xunit jenkins")
            run(runTestCommand)
            g_logger.debug("Retreiving results")
            get("%s" % (g_remotePath), resultsDir)
        except Exception:
            g_logger.exception("Caught exception in run_tests")
            stopInstance(instanceId, g_config, g_logger)
            raise
        else:
            g_logger.info("Tests have finished.")

            # Rename the results file to be job specific
            newResultsFile = "grok_integration_test_results_%s.xml" % getBuildNumber(
            )
            if os.path.exists(os.path.join(resultsDir, "results.xml")):
                shutil.move(os.path.join(resultsDir, "results.xml"),
                            os.path.join(resultsDir, newResultsFile))
            if os.path.exists(os.path.join(resultsDir, newResultsFile)):
                successStatus = analyzeResults("%s/%s" %
                                               (resultsDir, newResultsFile))
            else:
                g_logger.error("Could not find results file: %s",
                               newResultsFile)
                successStatus = False

            if args.pipelineJson:
                with open(args.pipelineJson) as jsonFile:
                    pipelineParams = json.load(jsonFile)

                pipelineParams["integration_test"] = {
                    "testStatus": successStatus
                }
                with open(args.pipelineJson, "w") as jsonFile:
                    jsonFile.write(
                        json.dumps(pipelineParams, ensure_ascii=False))

            if successStatus:
                postTestRunAction(instanceId, terminate=True, **g_config)
            else:
                postTestRunAction(instanceId, terminate=False, **g_config)
                raise TestsFailed("Integration test failed")
Code example #18
def main():
  """
    This is the main function.
  """
  args = parseArgs()

  global g_logger
  g_logger = initPipelineLogger("run-integration-tests", logLevel=args.logLevel)

  if not (os.environ.get("AWS_ACCESS_KEY_ID") and
          os.environ.get("AWS_SECRET_ACCESS_KEY")):
    g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
    raise MissingAWSKeysInEnvironment("AWS keys are not set")
  else:
    g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
    g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

  # Launch an instance with the given AMI
  publicDnsName, instanceId = launchInstance(args.ami, g_config, g_logger)

  resultsDir = prepareResultsDir()
  serverKey = os.path.join("~", ".ssh", g_config["KEY"] + ".pem")

  # The calls in this function are not signal-safe. However, the expectation is
  # that making them signal safe would be overly burdensome at this time. If
  # issues arise later, then we'll figure out what the right approach is at that
  # time.
  def handleSignalInterrupt(signal, _frame):
    g_logger.error("Received interrupt signal %s", signal)
    if instanceId:
      g_logger.error("Terminating instance %s", instanceId)
      terminateInstance(instanceId, g_config, g_logger)

  signal.signal(signal.SIGINT, handleSignalInterrupt)
  signal.signal(signal.SIGTERM, handleSignalInterrupt)

  with settings(host_string=publicDnsName,
                key_filename=serverKey,
                user=g_config["USER"], connection_attempts=30, warn_only=True):
    g_logger.info("Connected to %s using %s.pem", publicDnsName, serverKey)
    # Run Integration tests
    try:
      waitForGrokServerToBeReady(publicDnsName, serverKey, g_config["USER"],
                                 g_logger)
      getApiKey(instanceId, publicDnsName, g_config, g_logger)
      # TODO remove the exports; keeping them intact for now because some of the
      # integration tests use the ConfigAttributePatch which reads these values
      # from environment.
      runTestCommand = ("export AWS_ACCESS_KEY_ID=%s"
                        % os.environ["AWS_ACCESS_KEY_ID"] +
                        " && export AWS_SECRET_ACCESS_KEY=%s"
                        % os.environ["AWS_SECRET_ACCESS_KEY"] +
                        " && source /etc/grok/supervisord.vars" +
                        " && cd $GROK_HOME" +
                        " && ./run_tests.sh --integration --language py" +
                        " --results xunit jenkins")
      run(runTestCommand)
      g_logger.debug("Retreiving results")
      get("%s" % (g_remotePath), resultsDir)
    except Exception:
      g_logger.exception("Caught exception in run_tests")
      stopInstance(instanceId, g_config, g_logger)
      raise
    else:
      g_logger.info("Tests have finished.")

      # Rename the results file to be job specific
      newResultsFile = "grok_integration_test_results_%s.xml" % getBuildNumber()
      if os.path.exists(os.path.join(resultsDir, "results.xml")):
        shutil.move(os.path.join(resultsDir, "results.xml"),
                    os.path.join(resultsDir, newResultsFile))
      if os.path.exists(os.path.join(resultsDir, newResultsFile)):
        successStatus = analyzeResults("%s/%s" % (resultsDir, newResultsFile))
      else:
        g_logger.error("Could not find results file: %s", newResultsFile)
        successStatus = False

      if args.pipelineJson:
        with open(args.pipelineJson) as jsonFile:
          pipelineParams = json.load(jsonFile)

        pipelineParams["integration_test"] = {"testStatus": successStatus}
        with open(args.pipelineJson, "w") as jsonFile:
          jsonFile.write(json.dumps(pipelineParams, ensure_ascii=False))

      if successStatus:
        postTestRunAction(instanceId, terminate=True, **g_config)
      else:
        postTestRunAction(instanceId, terminate=False, **g_config)
        raise TestsFailed("Integration test failed")
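
After the run, the pipeline JSON carries the outcome under an "integration_test" key, as written above. A minimal sketch of a downstream gate that reads it back (path hypothetical):

import json

with open("/tmp/build/grok_pipeline.json") as jsonFile:
  pipelineParams = json.load(jsonFile)

# Stop the pipeline unless the integration tests recorded success.
if not pipelineParams.get("integration_test", {}).get("testStatus"):
  raise RuntimeError("Integration tests did not pass; aborting the pipeline")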
Code example #19
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments or a JSON blob containing the required
    values.

    :returns: A dict of the parameters needed, as follows:
      {
        "buildWorkspace": "/path/to/build/in",
        "htmitRemote": "[email protected]:Numenta/numenta-apps.git",
        "htmitBranch": "master",
        "htmItSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file"
      }

    :rtype: dict

    :raises parser.error: if a wrong combination of arguments is given or
      arguments are missing.
  """
  parser = argparse.ArgumentParser(description="build tool for HTM-IT")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="The JSON file generated by manifest tool.")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for HTM-IT")
  parser.add_argument("--htm-it-remote", dest="htmitRemote", type=str,
                      help="The htm-it remote you want to use, e.g.,  "
                           "[email protected]:Numenta/numenta-apps.git")
  parser.add_argument("--htm-it-sha", dest="htmItSha", type=str,
                      help="HTM-IT SHA that will be built")
  parser.add_argument("--htm-it-branch", dest="htmitBranch", type=str,
                      help="The branch you are building from")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                      help="Current release version, this will be used as base"
                           "version for htm-it and tracking rpm")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, by default it takes warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = diagnostics.initPipelineLogger(name="build",
                                            logLevel=args["logLevel"])
  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    errorMessage = "Please provide parameters via JSON file or commandline"
    parser.error(errorMessage)

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  # Setup defaults
  pipelineConfig = {
    "buildWorkspace": None,
    "htmitRemote": "[email protected]:Numenta/numenta-apps.git",
    "htmitBranch": "master",
    "htmItSha": "HEAD",
    "pipelineParams": pipelineParams,
    "pipelineJson": None
  }

  pipelineConfig["buildWorkspace"] = os.environ.get("BUILD_WORKSPACE",
                    pipelineParams.get("buildWorkspace",
                      pipelineParams.get("manifest", {}).get("buildWorkspace")))
  if not pipelineConfig["buildWorkspace"]:
    parser.error("You must set a BUILD_WORKSPACE environment variable "
                 "or pass the --build-workspace argument via the command line "
                 "or json file.")

  pipelineConfig["htmitRemote"] = pipelineParams.get("htmitRemote",
                          pipelineParams.get("manifest", {}).get("htmitRemote"))
  pipelineConfig["htmitBranch"] = pipelineParams.get("htmitBranch",
                          pipelineParams.get("manifest", {}).get("htmitBranch"))
  pipelineConfig["htmItSha"] = pipelineParams.get("htmItSha",
                          pipelineParams.get("manifest", {}).get("htmItSha"))

  pipelineConfig["pipelineJson"] = args["pipelineJson"]

  return pipelineConfig