Example #1
def main(args):
  """
  This function terminates stale instances running in the regions
  passed as parameters to the script.
  Instances that satisfy the following conditions will be terminated:
  - The instance name starts with 'vertis_'. Instances starting with
    'vertis_donotremove_' will not be terminated.
  - The instance has been running for more than three hours and has a
    blank 'Name' tag.
  """

  logger = initPipelineLogger("janitor_ec2", logLevel=args.logLevel)

  awsAccessKeyId = os.environ.get("AWS_ACCESS_KEY_ID")
  awsSecretAccessKey = os.environ.get("AWS_SECRET_ACCESS_KEY")

  if not (awsAccessKeyId and awsSecretAccessKey):
    logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
    raise MissingAWSKeysInEnvironment("AWS keys are not set")

  # required for terminateInstance function
  config = {}
  config["AWS_ACCESS_KEY_ID"] = awsAccessKeyId
  config["AWS_SECRET_ACCESS_KEY"] = awsScrectAccessKey
  for region in args.region:
    instances = [i.id for i in getInstances(region, awsAccessKeyId,
                                            awsSecretAccessKey, logger)
                 if toBeTerminated(i, logger)]
    if instances:
      config["REGION"] = region
      logger.info("Deleting {}".format(", ".join(instances)))
      for instance in instances:
        terminateInstance(instance, config, logger)
    else:
      logger.info("None of the instances are stale.")
Example #2
def __init__(self, config):
  # convert dict to object
  if isinstance(config, dict):
    tmpConfig = type('Config', (), {})()
    for k, v in config.items():
      setattr(tmpConfig, k, v)
    config = tmpConfig
  failmsg = None
  if config.sitePackagesTarball:
    if config.flavor != "grok":
      failmsg = "--site-packages is only used for grok packages."
  if config.flavor is None:
    failmsg = "You must set a type of rpm to create with --rpm-flavor"
  if config.artifacts == []:
    failmsg = "You must specify artifacts in the fakeroot to package."
    if config.flavor == "grok":
      failmsg = failmsg + " Grok rpms should specify opt"
    if config.flavor == "infrastructure":
      failmsg = failmsg + " Infrastructure rpms should specify opt"
    if config.flavor == "saltcellar":
      failmsg = failmsg + " Saltcellar rpms should specify srv"
  if failmsg:
    raise InvalidParametersError(failmsg)
  self.config = config
  self.environment = dict(os.environ)
  self.fakeroot = None
  self.logger = log.initPipelineLogger(name="create-numenta-rpm",
                                       logLevel=config.logLevel)
  self.productsDirectory = None
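
The dict-to-object shim at the top lets callers pass either a plain dict or
an argparse-style object with attributes. A standalone illustration of the
same trick (the config values here are made up for the example):

config = {"flavor": "grok", "artifacts": ["opt"],
          "sitePackagesTarball": None, "logLevel": "debug"}
cfg = type('Config', (), {})()  # anonymous empty class, instantiated
for k, v in config.items():
  setattr(cfg, k, v)            # promote each dict key to an attribute
assert cfg.flavor == "grok"     # attribute access now works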
Example #3
def __init__(self, config):
    # convert dict to object
    if isinstance(config, dict):
        tmpConfig = type('Config', (), {})()
        for k, v in config.items():
            setattr(tmpConfig, k, v)
        config = tmpConfig
    failmsg = None
    if config.sitePackagesTarball:
        if config.flavor != "YOMP":
            failmsg = "--site-packages is only used for YOMP packages."
    if config.flavor is None:
        failmsg = "You must set a type of rpm to create with --rpm-flavor"
    if config.artifacts == []:
        failmsg = "You must specify artifacts in the fakeroot to package."
        if config.flavor == "YOMP":
            failmsg = failmsg + " YOMP rpms should specify opt"
        if config.flavor == "infrastructure":
            failmsg = failmsg + " Infrastructure rpms should specify opt"
        if config.flavor == "saltcellar":
            failmsg = failmsg + " Saltcellar rpms should specify srv"
    if failmsg:
        raise InvalidParametersError(failmsg)
    self.config = config
    self.environment = dict(os.environ)
    self.fakeroot = None
    self.logger = log.initPipelineLogger(name="create-numenta-rpm",
                                         logLevel=config.logLevel)
    self.productsDirectory = None
Example #4
def main(args):
  """
    Main function for the pipeline. Executes all sub-tasks

    :param args: Parsed command line arguments
  """
  logger = initPipelineLogger("manifest", logLevel=args.logLevel)
  buildWorkspace = os.environ.get("BUILD_WORKSPACE", None)
  if not buildWorkspace:
    baseDir = jenkins.getWorkspace()
    buildId = jenkins.getBuildNumber()
    buildWorkspace = mkdtemp(prefix=buildId, dir=baseDir)

  manifest = vars(args)
  # Update buildWorkspace in manifest section for pipelineJson
  manifest.update({"buildWorkspace": buildWorkspace})
  manifestEnv = {"manifest": manifest}

  with open("%s/%s_pipeline.json" % (buildWorkspace, args.pipeline), 'w') as fp:
    fp.write(json.dumps(manifestEnv, ensure_ascii=False))

  logger.debug(json.dumps(manifestEnv))
  pipelineJsonPath = "%s/%s_pipeline.json" % (buildWorkspace, args.pipeline)
  logger.info("Pipeline JSON path: %s", pipelineJsonPath)
  return pipelineJsonPath
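
The file written above nests everything under a "manifest" key, so a
downstream stage can read the workspace back like this (an illustrative
reader, not part of the original):

import json

with open(pipelineJsonPath) as fp:  # path returned by main() above
  params = json.load(fp)
buildWorkspace = params["manifest"]["buildWorkspace"]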
Example #6
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments.

    :returns: pipeline, buildWorkspace, grokSha, pipelineParams.
  """
  parser = argparse.ArgumentParser(description="Test tool to run tests for "
                                   "Grok. Provide parameters either via a "
                                   "path to a JSON file or the command "
                                   "line. Providing both is prohibited. "
                                   "Use --help for detailed information "
                                   "on parameters")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for grok")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="Path locator for build json file. This file should "
                      "have all parameters required by this script. Provide "
                      "parameters either as a command line parameters or as "
                      "individial parameters")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, optional parameter and defaulted to "
                      "level warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = initPipelineLogger("run_tests", logLevel=args["logLevel"])

  g_logger.debug(args)
  saneParams = {k:v for k, v in args.items() if v is not None}

  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    parser.error("Please provide parameters via JSON file or commandline,"
                   "but not both")

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  buildWorkspace = os.environ.get("BUILD_WORKSPACE",
                     pipelineParams.get("buildWorkspace",
                     pipelineParams.get("manifest", {}).get("buildWorkspace")))

  if buildWorkspace and pipelineParams:
    return (buildWorkspace, pipelineParams, args["pipelineJson"])
  else:
    parser.error("Please provide all parameters, "
                 "use --help for further details")
Example #7
def addAndParseArgs(jsonArgs):
    """
    Parse the command line arguments

    :returns Parsed object for the command-line arguments from sys.argv
    :rtype argparse.Namespace
  """
    parser = argparse.ArgumentParser(description="Tool to sync yum repository "
                                     "with S3")
    parser.add_argument("--repo-name",
                        dest="repoName",
                        type=str,
                        help="Name of the repository to be updated")
    parser.add_argument("--pipeline-json",
                        dest="pipelineJson",
                        type=str,
                        help="Path locator for build json file")
    parser.add_argument("--log",
                        dest="logLevel",
                        type=str,
                        default="warning",
                        help="Logging level")
    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())

    global g_logger
    g_logger = logger.initPipelineLogger("sync_yum_repo_s3",
                                         logLevel=args["logLevel"])

    saneParams = {k: v for k, v in args.items() if v is not None}
    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        errorMessage = "Please provide parameters via JSON file or commandline"
        g_logger.error(errorMessage)
        parser.error(errorMessage)
    else:
        pipelineParams = {}
        if "pipelineJson" in saneParams:
            with open(args["pipelineJson"]) as paramFile:
                pipelineParams = json.load(paramFile)
        else:
            pipelineParams = saneParams
    repoName = pipelineParams.get(
        "repoName",
        pipelineParams.get("packageRpm", {}).get("repoName"))
    syncRpm = pipelineParams.get(
        "syncRpm",
        pipelineParams.get("packageRpm", {}).get("syncRpm"))
    if repoName:
        return (repoName, syncRpm, pipelineParams, args["pipelineJson"])
    else:
        parser.error("Please provide all parameters, "
                     "use --help for further details")
Example #8
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments

    :returns Parsed object for the command-line arguments from sys.argv
    :rtype argparse.Namespace
  """
  parser = argparse.ArgumentParser(description="Tool to sync yum repository "
                                   "with S3")
  parser.add_argument("--repo-name", dest="repoName", type=str,
                      help="Name of the repository to be updated")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="Path locator for build json file")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level")
  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = logger.initPipelineLogger("sync_yum_repo_s3",
                                       logLevel=args["logLevel"])

  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    errorMessage = "Please provide parameters via JSON file or commandline"
    g_logger.error(errorMessage)
    parser.error(errorMessage)
  else:
    pipelineParams = {}
    if "pipelineJson" in saneParams:
      with open(args["pipelineJson"]) as paramFile:
        pipelineParams = json.load(paramFile)
    else:
      pipelineParams = saneParams
  repoName = pipelineParams.get("repoName", pipelineParams.get("packageRpm",
                                {}).get("repoName"))
  syncRpm = pipelineParams.get("syncRpm", pipelineParams.get("packageRpm",
                                {}).get("syncRpm"))
  if repoName:
    return (repoName, syncRpm, pipelineParams, args["pipelineJson"])
  else:
    parser.error("Please provide all parameters, "
                 "use --help for further details")
Example #9
def main(args):
    """
    This function terminates stale instances running in the regions
    passed as parameters to the script.
    Instances that satisfy the following conditions will be terminated:
    - The instance name starts with 'vertis_'. Instances starting with
      'vertis_donotremove_' will not be terminated.
    - The instance has been running for more than three hours and has a
      blank 'Name' tag.
    """

    logger = initPipelineLogger("janitor_ec2", logLevel=args.logLevel)

    awsAccessKeyId = os.environ.get("AWS_ACCESS_KEY_ID")
    awsSecretAccessKey = os.environ.get("AWS_SECRET_ACCESS_KEY")

    if not (awsAccessKeyId and awsSecretAccessKey):
        logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
        raise MissingAWSKeysInEnvironment("AWS keys are not set")

    # required for terminateInstance function
    config = {}
    config["AWS_ACCESS_KEY_ID"] = awsAccessKeyId
    config["AWS_SECRET_ACCESS_KEY"] = awsScrectAccessKey
    for region in args.region:
        instances = [
            i.id for i in getInstances(region, awsAccessKeyId,
                                       awsSecretAccessKey, logger)
            if toBeTerminated(i, logger)
        ]
        if instances:
            config["REGION"] = region
            logger.info("Deleting {}".format(", ".join(instances)))
            for instance in instances:
                terminateInstance(instance, config, logger)
        else:
            logger.info("None of the instances are stale.")
Example #10
def addAndParseArgs(jsonArgs):
  """
    Parse, sanitize and process command line arguments.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :returns: A dict containing releaseVersion, buildWorkspace, grokSha,
    deployTrack, grokDeployTrack, amiName (all strings).

    Example dict:
    {
      "releaseVersion": "1.7.0,
      "buildWorkspace": "/path/to/Workspace",
      "grokSha": "0xDEADBEEF",
      "deployTrack": "production",
      "grokDeployTrack": "production",
      "amiName": "grok-pipeline"
    }

    :rtype: dict of strings

    :raises MissingCommandLineArgument: when expected parameters are missing
    :raises ConflictingCommandLineArguments: when pipelineJson and other
      conflicting parameters are set
  """
  parser = argparse.ArgumentParser(description="Tool to bake AMI with "
                                   "given version of GROK")
  parser.add_argument("--grok-sha", dest="grokSha", type=str,
                      help="SHA from Grok used for this build")
  parser.add_argument("--deploy-track", dest="deployTrack", type=str,
                      help="Deploy track that should be used for tracking RPM")
  parser.add_argument("--track-rpm-version", dest="trackVersion", type=str,
                      help="Tracking RPM version")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                      help="Current release version, this will be used as base")
  parser.add_argument("--grok-deploy-track", dest="grokDeployTrack", type=str,
                      help="Deploy track for grok RPM")
  parser.add_argument("--ami-name", dest="amiName", type=str,
                      help="Descriptive key to be used with auto generated ami"
                      "name")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for grok")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="Path locator for build json file. This file should"
                      "have all parameters required by this script. Provide"
                      "parameters either as a command line parameters or as"
                      "individial parameters")
  parser.add_argument("--grok-rpm-name", dest="grokRpmName",
                      type=str, help="Grok RPM name to installed on the AMI")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, optional parameter and defaulted to"
                      "level warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = initPipelineLogger("bake_ami", logLevel=args["logLevel"])
  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    errorMessage = "Please provide parameters via JSON file or commandline"
    g_logger.error(errorMessage)
    parser.error(errorMessage)

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  releaseVersion = pipelineParams.get("releaseVersion",
                     pipelineParams.get("manifest", {}).get("releaseVersion"))

  buildWorkspace = os.environ.get("BUILD_WORKSPACE",
                     pipelineParams.get("buildWorkspace",
                     pipelineParams.get("manifest", {}).get("buildWorkspace")))

  grokSha = pipelineParams.get("grokSha",
              pipelineParams.get("build", {}).get("grokSha"))
  deployTrack = pipelineParams.get("deployTrack",
                  pipelineParams.get("build", {}).get("deployTrack"))
  grokDeployTrack = pipelineParams.get("grokDeployTrack",
                      pipelineParams.get("build", {}).get("grokDeployTrack"))
  amiName = pipelineParams.get("amiName",
              pipelineParams.get("build", {}).get("amiName"))
  grokRpmName = pipelineParams.get("grokRpmName",
                  pipelineParams.get("packageRpm", {}).get("grokRpmName"))
  pipelineJson = args["pipelineJson"]
  if (releaseVersion and buildWorkspace and grokSha
      and deployTrack and grokDeployTrack and amiName and grokRpmName):
    return {"releaseVersion": releaseVersion,
            "buildWorkspace": buildWorkspace,
            "grokSha": grokSha,
            "deployTrack": deployTrack,
            "grokDeployTrack": grokDeployTrack,
            "amiName": amiName,
            "grokRpmName": grokRpmName,
            "pipelineJson": pipelineJson}
  else:
    parser.error("Please provide all parameters, "
                 "Use --help for further details")
Example #11
def addAndParseArgs(jsonArgs):
    """
  This method parses the command line paramaters passed to the script.

  :returns: logger, buildWorkspace, YOMPSha, releaseVersion,
            pipelineParams, pipelineJson
  """

    parser = argparse.ArgumentParser(description="Package tool for creating"
                                     " YOMP rpms")
    parser.add_argument("--pipeline-json",
                        dest="pipelineJson",
                        type=str,
                        help="The manifest file name")
    parser.add_argument("--build-workspace",
                        dest="buildWorkspace",
                        type=str,
                        help="Common dir prefix for YOMP")
    parser.add_argument("--YOMPSha",
                        dest="YOMPSha",
                        type=str,
                        help="The YOMPSha for which are creating rpm")
    parser.add_argument("--YOMP-remote",
                        dest="YOMPRemote",
                        type=str,
                        help="The YOMP remote you want to use, "
                        "e.g. [email protected]:Numenta/numenta-apps.YOMP")
    parser.add_argument("--unit-test-status",
                        dest="testStatus",
                        type=str,
                        help="Unit test success status")
    parser.add_argument(
        "--release-version",
        dest="releaseVersion",
        type=str,
        help="Current release version, this will be used as base"
        "version for YOMP and tracking rpm")
    parser.add_argument("--log",
                        dest="logLevel",
                        type=str,
                        default="warning",
                        help="Logging level")

    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())
    global g_logger

    g_logger = log.initPipelineLogger("packaging", logLevel=args["logLevel"])

    saneParams = {k: v for k, v in args.items() if v is not None}
    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        parser.error("Please provide parameters via JSON file or commandline,"
                     "but not both")

    if "pipelineJson" in saneParams:
        with open(args["pipelineJson"]) as paramFile:
            pipelineParams = json.load(paramFile)
    else:
        pipelineParams = saneParams

    g_logger.info("pipeline parameters:%s", pipelineParams)

    buildWorkspace = os.environ.get(
        "BUILD_WORKSPACE",
        pipelineParams.get(
            "buildWorkspace",
            pipelineParams.get("manifest", {}).get("buildWorkspace")))
    YOMPSha = pipelineParams.get(
        "YOMPSha",
        pipelineParams.get("build", {}).get("YOMPSha"))
    unitTestStatus = pipelineParams.get(
        "testStatus",
        pipelineParams.get("test", {}).get("testStatus"))
    releaseVersion = pipelineParams.get(
        "releaseVersion",
        pipelineParams.get("manifest", {}).get("releaseVersion"))
    YOMPRemote = pipelineParams.get(
        "YOMPRemote",
        pipelineParams.get("manifest", {}).get("YOMPRemote"))

    if platform.system() != "Linux":
        g_logger.error(
            "RPMs will be built only on Linux (CentOS). Bailing out.")
        raise exceptions.FailedToCreateRPMOnNonLinuxBox("RPMs will not build")

    if not unitTestStatus:
        g_logger.error("Unit Test failed. RPM's will not be created.")
        raise exceptions.UnittestFailed("Unit Test failed")

    if buildWorkspace and YOMPSha and YOMPRemote:
        return (buildWorkspace, YOMPSha, releaseVersion, pipelineParams,
                args["pipelineJson"], YOMPRemote)
    else:
        parser.error("Please provide all parameters, "
                     "Use --help for further details")
Example #12
def main():
  """
    This is the main function.
  """
  args = parseArgs()


  global g_logger
  g_logger = initPipelineLogger("run-integration-tests", logLevel=args.logLevel)

  if not (os.environ.get("AWS_ACCESS_KEY_ID") and
          os.environ.get("AWS_SECRET_ACCESS_KEY")):
    g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
    raise MissingAWSKeysInEnvironment("AWS keys are not set")
  else:
    g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
    g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

  # Launching instance with the given AMI
  publicDnsName, instanceId = launchInstance(args.ami, g_config, g_logger)

  resultsDir = prepareResultsDir()
  serverKey = os.path.join("~", ".ssh", g_config["KEY"] + ".pem")

  # The calls in this function are not signal-safe. However, the expectation is
  # that making them signal safe would be overly burdensome at this time. If
  # issues arise later, then we'll figure out what the right approach is at that
  # time.
  def handleSignalInterrupt(signal, _frame):
    g_logger.error("Received interrupt signal %s", signal)
    if instanceId:
      g_logger.error("Terminating instance %s", instanceId)
      terminateInstance(instanceId, g_config, g_logger)

  signal.signal(signal.SIGINT, handleSignalInterrupt)
  signal.signal(signal.SIGTERM, handleSignalInterrupt)

  with settings(host_string=publicDnsName,
                key_filename=serverKey,
                user=g_config["USER"], connection_attempts=30, warn_only=True):
    g_logger.info("Connected to %s using %s.pem", publicDnsName, serverKey)
    # Run Integration tests
    try:
      waitForGrokServerToBeReady(publicDnsName, serverKey, g_config["USER"],
                                 g_logger)
      getApiKey(instanceId, publicDnsName, g_config, g_logger)
      # TODO remove the exports; keeping them intact for now because some of the
      # integration tests use the ConfigAttributePatch which reads these values
      # from environment.
      runTestCommand = ("export AWS_ACCESS_KEY_ID=%s"
                        % os.environ["AWS_ACCESS_KEY_ID"] +
                        " && export AWS_SECRET_ACCESS_KEY=%s"
                        % os.environ["AWS_SECRET_ACCESS_KEY"] +
                        " && source /etc/grok/supervisord.vars" +
                        " && cd $GROK_HOME" +
                        " && ./run_tests.sh --ami --integration" +
                        " --language py --results xunit jenkins")
      run(runTestCommand)
      g_logger.debug("Retreiving results")
      get("%s" % (g_remotePath), resultsDir)
    except Exception:
      g_logger.exception("Caught exception in run_tests")
      stopInstance(instanceId, g_config, g_logger)
      raise
    else:
      g_logger.info("Tests have finished.")
      successStatus = analyzeResults("%s/results.xml" % resultsDir)
      if args.pipelineJson:
        with open(args.pipelineJson) as jsonFile:
          pipelineParams = json.load(jsonFile)

        pipelineParams["integration_test"] = {"testStatus": successStatus}
        with open(args.pipelineJson, "w") as jsonFile:
          jsonFile.write(json.dumps(pipelineParams, ensure_ascii=False))

      if successStatus:
        postTestRunAction(instanceId, terminate=True, **g_config)
      else:
        postTestRunAction(instanceId, terminate=False, **g_config)
        raise TestsFailed("Integration test failed")
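
The remote command above is assembled by string concatenation; an equivalent
construction that joins the steps is arguably easier to read (illustrative
only, producing the same command):

runTestCommand = " && ".join([
  "export AWS_ACCESS_KEY_ID=%s" % os.environ["AWS_ACCESS_KEY_ID"],
  "export AWS_SECRET_ACCESS_KEY=%s" % os.environ["AWS_SECRET_ACCESS_KEY"],
  "source /etc/grok/supervisord.vars",
  "cd $GROK_HOME",
  "./run_tests.sh --ami --integration --language py --results xunit jenkins",
])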
Example #13
def addAndParseArgs(jsonArgs):
  """
    Parse the command line arguments or a JSON blob containing the required
    values.

    :returns: A dict of the parameters needed, as follows:
      {
        "buildWorkspace": "/path/to/build/in",
        "grokRemote": "[email protected]:Numenta/numenta-apps.git",
        "grokBranch": "master",
        "grokSha": "HEAD",
        "nupicRemote": "[email protected]:numenta/nupic.git",
        "nupicBranch": "master",
        "nupicSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file"
      }

    :rtype: dict

    :raises parser.error: when arguments are missing or a wrong combination
      of arguments is given.
  """
  parser = argparse.ArgumentParser(description="build tool for NuPIC and Grok")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="The JSON file generated by manifest tool.")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for Grok and NuPIC")
  parser.add_argument("--grok-remote", dest="grokRemote", type=str,
                      help="The grok remote you want to use, e.g.,  "
                           "[email protected]:Numenta/numenta-apps.git")
  parser.add_argument("--grok-sha", dest="grokSha", type=str,
                      help="Grok SHA that will be built")
  parser.add_argument("--grok-branch", dest="grokBranch", type=str,
                      help="The branch you are building from")
  parser.add_argument("--nupic-remote", dest="nupicRemote", type=str,
                      help="The nupic remote you want to use,"
                           "e.g., [email protected]:numenta/nupic.git")
  parser.add_argument("--nupic-branch", dest="nupicBranch", type=str,
                      help="The NuPIC branch to add in deploy track")
  parser.add_argument("--nupic-sha", dest="nupicSha", type=str,
                      help="NuPIC SHA that will be built.")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                      help="Current release version, this will be used as base"
                           "version for grok, NuPIC and tracking rpm")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level, by default it takes warning")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())

  global g_logger
  g_logger = log.initPipelineLogger("build", logLevel=args["logLevel"])
  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    errorMessage = "Please provide parameters via JSON file or commandline"
    parser.error(errorMessage)

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  # Setup defaults
  pipelineConfig = {
    "buildWorkspace": None,
    "grokRemote": "[email protected]:Numenta/numenta-apps.git",
    "grokBranch": "master",
    "grokSha": "HEAD",
    "nupicRemote": "[email protected]:numenta/nupic.git",
    "nupicBranch": "master",
    "nupicSha": None,
    "pipelineParams": pipelineParams,
    "pipelineJson": None
  }

  pipelineConfig["buildWorkspace"] = os.environ.get("BUILD_WORKSPACE",
                    pipelineParams.get("buildWorkspace",
                      pipelineParams.get("manifest", {}).get("buildWorkspace")))
  if not pipelineConfig["buildWorkspace"]:
    parser.error("You must set a BUILD_WORKSPACE environment variable "
                 "or pass the --build-workspace argument via the command line "
                 "or json file.")

  pipelineConfig["grokRemote"] = pipelineParams.get("grokRemote",
                          pipelineParams.get("manifest", {}).get("grokRemote"))
  pipelineConfig["grokBranch"] = pipelineParams.get("grokBranch",
                          pipelineParams.get("manifest", {}).get("grokBranch"))
  pipelineConfig["grokSha"] = pipelineParams.get("grokSha",
                          pipelineParams.get("manifest", {}).get("grokSha"))

  pipelineConfig["nupicRemote"] = pipelineParams.get("nupicRemote",
                          pipelineParams.get("manifest", {}).get("nupicRemote"))
  pipelineConfig["nupicBranch"] = pipelineParams.get("nupicBranch",
                          pipelineParams.get("manifest", {}).get("nupicBranch"))
  pipelineConfig["nupicSha"] = pipelineParams.get("nupicSha",
                          pipelineParams.get("manifest", {}).get("nupicSha"))

  pipelineConfig["pipelineJson"] = args["pipelineJson"]

  return pipelineConfig
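
Note that the direct .get() chains above overwrite the defaults in
pipelineConfig with None whenever a key is absent from both the flat
parameters and the manifest section. A variant that keeps the defaults on a
miss (a sketch, not the original behavior):

for key in ("grokRemote", "grokBranch", "grokSha",
            "nupicRemote", "nupicBranch", "nupicSha"):
  value = pipelineParams.get(key,
                             pipelineParams.get("manifest", {}).get(key))
  if value is not None:  # keep the default when neither source has the key
    pipelineConfig[key] = value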
Example #14
def addAndParseArgs(jsonArgs):
  """
  This method parses the command line parameters passed to the script.

  :returns: logger, buildWorkspace, nupicBuildDir,
            grokSha, releaseVersion,
            pipelineParams, pipelineJson
  """

  parser = argparse.ArgumentParser(description="Package tool for creating"
                                               " grok and NuPIC rpms")
  parser.add_argument("--pipeline-json", dest="pipelineJson", type=str,
                      help="The manifest file name")
  parser.add_argument("--build-workspace", dest="buildWorkspace", type=str,
                      help="Common dir prefix for grok and NuPIC")
  parser.add_argument("--nupic-build-dir", dest="nupicBuildDir", type=str,
                      help="nupic build dir where NuPIC is build")
  parser.add_argument("--grokSha", dest="grokSha", type=str,
                      help="The grokSha for which are creating rpm")
  parser.add_argument("--grok-remote", dest="grokRemote", type=str,
                      help="The grok remote you want to use, "
                           "e.g. [email protected]:Numenta/numenta-apps.git")
  parser.add_argument("--unit-test-status", dest="testStatus", type=str,
                      help="Unit test success status")
  parser.add_argument("--release-version", dest="releaseVersion", type=str,
                       help="Current release version, this will be used as base"
                       "version for grok, nupic and tracking rpm")
  parser.add_argument("--log", dest="logLevel", type=str, default="warning",
                      help="Logging level")

  args = {}
  if jsonArgs:
    args = jsonArgs
  else:
    args = vars(parser.parse_args())
  global g_logger

  g_logger = log.initPipelineLogger("packaging", logLevel=args["logLevel"])

  saneParams = {k:v for k, v in args.items() if v is not None}
  del saneParams["logLevel"]

  if "pipelineJson" in saneParams and len(saneParams) > 1:
    parser.error("Please provide parameters via JSON file or commandline,"
                   "but not both")

  if "pipelineJson" in saneParams:
    with open(args["pipelineJson"]) as paramFile:
      pipelineParams = json.load(paramFile)
  else:
    pipelineParams = saneParams

  g_logger.info("pipeline parameters:%s", pipelineParams)

  buildWorkspace = os.environ.get("BUILD_WORKSPACE",
                     pipelineParams.get("buildWorkspace",
                     pipelineParams.get("manifest", {}).get("buildWorkspace")))
  nupicBuildDir = pipelineParams.get("nupicBuildDir",
                    pipelineParams.get("build", {}).get("nupicBuildDir"))
  grokSha = pipelineParams.get("grokSha",
              pipelineParams.get("build", {}).get("grokSha"))
  unitTestStatus = pipelineParams.get("testStatus",
                    pipelineParams.get("test", {}).get("testStatus"))
  releaseVersion = pipelineParams.get("releaseVersion",
                    pipelineParams.get("manifest", {}).get("releaseVersion"))
  grokRemote = pipelineParams.get("grokRemote",
                    pipelineParams.get("manifest", {}).get("grokRemote"))

  if platform.system() != "Linux":
    g_logger.error("RPMs will be built only on Linux (CentOS). Bailing out.")
    raise exceptions.FailedToCreateRPMOnNonLinuxBox("RPMs will not build")

  if not unitTestStatus:
    g_logger.error("Unit Test failed. RPM's will not be created.")
    raise exceptions.UnittestFailed("Unit Test failed")

  if buildWorkspace and nupicBuildDir and grokSha and grokRemote:
    return (buildWorkspace, nupicBuildDir, grokSha, releaseVersion,
            pipelineParams, args["pipelineJson"], grokRemote)
  else:
    parser.error("Please provide all parameters, "
                 "Use --help for further details")
Example #15
def addAndParseArgs(jsonArgs):
    """
    Parse the command line arguments or a JSON blob containing the required
    values.

    :returns: A dict of the parameters needed, as follows:
      {
        "buildWorkspace": "/path/to/build/in",
        "YOMPRemote": "[email protected]:Numenta/numenta-apps.YOMP",
        "YOMPBranch": "master",
        "YOMPSha": "HEAD",
        "pipelineParams": "{dict of parameters}",
        "pipelineJson": "/path/to/json/file"
      }

    :rtype: dict

    :raises parser.error: when arguments are missing or a wrong combination
      of arguments is given.
    """
    parser = argparse.ArgumentParser(description="build tool for YOMP")
    parser.add_argument("--pipeline-json",
                        dest="pipelineJson",
                        type=str,
                        help="The JSON file generated by manifest tool.")
    parser.add_argument("--build-workspace",
                        dest="buildWorkspace",
                        type=str,
                        help="Common dir prefix for YOMP")
    parser.add_argument("--YOMP-remote",
                        dest="YOMPRemote",
                        type=str,
                        help="The YOMP remote you want to use, e.g.,  "
                        "[email protected]:Numenta/numenta-apps.YOMP")
    parser.add_argument("--YOMP-sha",
                        dest="YOMPSha",
                        type=str,
                        help="YOMP SHA that will be built")
    parser.add_argument("--YOMP-branch",
                        dest="YOMPBranch",
                        type=str,
                        help="The branch you are building from")
    parser.add_argument(
        "--release-version",
        dest="releaseVersion",
        type=str,
        help="Current release version, this will be used as base"
        "version for YOMP and tracking rpm")
    parser.add_argument("--log",
                        dest="logLevel",
                        type=str,
                        default="warning",
                        help="Logging level, by default it takes warning")

    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())

    global g_logger
    g_logger = log.initPipelineLogger("build", logLevel=args["logLevel"])
    saneParams = {k: v for k, v in args.items() if v is not None}
    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        errorMessage = "Please provide parameters via JSON file or commandline"
        parser.error(errorMessage)

    if "pipelineJson" in saneParams:
        with open(args["pipelineJson"]) as paramFile:
            pipelineParams = json.load(paramFile)
    else:
        pipelineParams = saneParams

    # Setup defaults
    pipelineConfig = {
        "buildWorkspace": None,
        "YOMPRemote": "[email protected]:Numenta/numenta-apps.YOMP",
        "YOMPBranch": "master",
        "YOMPSha": "HEAD",
        "pipelineParams": pipelineParams,
        "pipelineJson": None
    }

    pipelineConfig["buildWorkspace"] = os.environ.get(
        "BUILD_WORKSPACE",
        pipelineParams.get(
            "buildWorkspace",
            pipelineParams.get("manifest", {}).get("buildWorkspace")))
    if not pipelineConfig["buildWorkspace"]:
        parser.error(
            "You must set a BUILD_WORKSPACE environment variable "
            "or pass the --build-workspace argument via the command line "
            "or json file.")

    pipelineConfig["YOMPRemote"] = pipelineParams.get(
        "YOMPRemote",
        pipelineParams.get("manifest", {}).get("YOMPRemote"))
    pipelineConfig["YOMPBranch"] = pipelineParams.get(
        "YOMPBranch",
        pipelineParams.get("manifest", {}).get("YOMPBranch"))
    pipelineConfig["YOMPSha"] = pipelineParams.get(
        "YOMPSha",
        pipelineParams.get("manifest", {}).get("YOMPSha"))

    pipelineConfig["pipelineJson"] = args["pipelineJson"]

    return pipelineConfig
Example #16
def addAndParseArgs(jsonArgs):
    """
    Parse, sanitize and process command line arguments.

    :param jsonArgs: dict of pipeline-json and logLevel, defaults to empty
      dict to make the script work independently and via driver scripts.
      e.g. {"pipelineJson": <PIPELINE_JSON_PATH>,
            "logLevel": <LOG_LEVEL>}


    :returns: A dict containing releaseVersion, buildWorkspace, YOMPSha,
    deployTrack, YOMPDeployTrack, amiName (all strings).

    Example dict:
    {
      "releaseVersion": "1.7.0,
      "buildWorkspace": "/path/to/Workspace",
      "YOMPSha": "0xDEADBEEF",
      "deployTrack": "production",
      "YOMPDeployTrack": "production",
      "amiName": "YOMP-pipeline"
    }

    :rtype: dict of strings

    :raises MissingCommandLineArgument: when expected parameters are missing
    :raises ConflictingCommandLineArguments: when pipelineJson and other
      conflicting parameters are set
    """
    parser = argparse.ArgumentParser(description="Tool to bake AMI with "
                                     "given version of YOMP")
    parser.add_argument("--YOMP-sha",
                        dest="YOMPSha",
                        type=str,
                        help="SHA from YOMP used for this build")
    parser.add_argument(
        "--release-version",
        dest="releaseVersion",
        type=str,
        help="Current release version, this will be used as base")
    parser.add_argument("--YOMP-deploy-track",
                        dest="YOMPDeployTrack",
                        type=str,
                        help="Deploy track for YOMP RPM")
    parser.add_argument(
        "--ami-name",
        dest="amiName",
        type=str,
        help="Descriptive key to be used with auto generated ami"
        "name")
    parser.add_argument("--build-workspace",
                        dest="buildWorkspace",
                        type=str,
                        help="Common dir prefix for YOMP")
    parser.add_argument(
        "--pipeline-json",
        dest="pipelineJson",
        type=str,
        help="Path locator for build json file. This file should"
        "have all parameters required by this script. Provide"
        "parameters either as a command line parameters or as"
        "individial parameters")
    parser.add_argument(
        "--log",
        dest="logLevel",
        type=str,
        default="warning",
        help="Logging level, optional parameter and defaulted to"
        "level warning")

    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())

    global g_logger
    g_logger = initPipelineLogger("bake_ami", logLevel=args["logLevel"])
    saneParams = {k: v for k, v in args.items() if v is not None}
    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        errorMessage = "Please provide parameters via JSON file or commandline"
        g_logger.error(errorMessage)
        parser.error(errorMessage)

    if "pipelineJson" in saneParams:
        with open(args["pipelineJson"]) as paramFile:
            pipelineParams = json.load(paramFile)
    else:
        pipelineParams = saneParams

    releaseVersion = pipelineParams.get(
        "releaseVersion",
        pipelineParams.get("manifest", {}).get("releaseVersion"))

    buildWorkspace = os.environ.get(
        "BUILD_WORKSPACE",
        pipelineParams.get(
            "buildWorkspace",
            pipelineParams.get("manifest", {}).get("buildWorkspace")))

    YOMPSha = pipelineParams.get(
        "YOMPSha",
        pipelineParams.get("build", {}).get("YOMPSha"))
    amiName = pipelineParams.get(
        "amiName",
        pipelineParams.get("build", {}).get("amiName"))
    pipelineJson = args["pipelineJson"]
    if releaseVersion and buildWorkspace and YOMPSha and amiName:
        return {
            "releaseVersion": releaseVersion,
            "buildWorkspace": buildWorkspace,
            "YOMPSha": YOMPSha,
            "amiName": amiName,
            "pipelineJson": pipelineJson
        }
    else:
        parser.error("Please provide all parameters, "
                     "Use --help for further details")
Example #17
def main():
  """
    This is the main function.
  """
  args = parseArgs()

  global g_logger
  g_logger = initPipelineLogger("run-integration-tests", logLevel=args.logLevel)

  if not (os.environ.get("AWS_ACCESS_KEY_ID") and
          os.environ.get("AWS_SECRET_ACCESS_KEY")):
    g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
    raise MissingAWSKeysInEnvironment("AWS keys are not set")
  else:
    g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
    g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

  # Launching instance with the given AMI
  publicDnsName, instanceId = launchInstance(args.ami, g_config, g_logger)

  resultsDir = prepareResultsDir()
  serverKey = os.path.join("~", ".ssh", g_config["KEY"] + ".pem")

  # The calls in this function are not signal-safe. However, the expectation is
  # that making them signal safe would be overly burdensome at this time. If
  # issues arise later, then we'll figure out what the right approach is at that
  # time.
  def handleSignalInterrupt(signal, _frame):
    g_logger.error("Received interrupt signal %s", signal)
    if instanceId:
      g_logger.error("Terminating instance %s", instanceId)
      terminateInstance(instanceId, g_config, g_logger)

  signal.signal(signal.SIGINT, handleSignalInterrupt)
  signal.signal(signal.SIGTERM, handleSignalInterrupt)

  with settings(host_string=publicDnsName,
                key_filename=serverKey,
                user=g_config["USER"], connection_attempts=30, warn_only=True):
    g_logger.info("Connected to %s using %s.pem", publicDnsName, serverKey)
    # Run Integration tests
    try:
      waitForGrokServerToBeReady(publicDnsName, serverKey, g_config["USER"],
                                 g_logger)
      getApiKey(instanceId, publicDnsName, g_config, g_logger)
      # TODO remove the exports; keeping them intact for now because some of the
      # integration tests use the ConfigAttributePatch which reads these values
      # from environment.
      runTestCommand = ("export AWS_ACCESS_KEY_ID=%s"
                        % os.environ["AWS_ACCESS_KEY_ID"] +
                        " && export AWS_SECRET_ACCESS_KEY=%s"
                        % os.environ["AWS_SECRET_ACCESS_KEY"] +
                        " && source /etc/grok/supervisord.vars" +
                        " && cd $GROK_HOME" +
                        " && ./run_tests.sh --integration --language py" +
                        " --results xunit jenkins")
      run(runTestCommand)
      g_logger.debug("Retreiving results")
      get("%s" % (g_remotePath), resultsDir)
    except Exception:
      g_logger.exception("Caught exception in run_tests")
      stopInstance(instanceId, g_config, g_logger)
      raise
    else:
      g_logger.info("Tests have finished.")

      # Rename the results file to be job specific
      newResultsFile = "grok_integration_test_results_%s.xml" % getBuildNumber()
      if os.path.exists(os.path.join(resultsDir, "results.xml")):
        shutil.move(os.path.join(resultsDir, "results.xml"),
                    os.path.join(resultsDir, newResultsFile))
      if os.path.exists(os.path.join(resultsDir, newResultsFile)):
        successStatus = analyzeResults("%s/%s" % (resultsDir, newResultsFile))
      else:
        g_logger.error("Could not find results file: %s", newResultsFile)
        successStatus = False

      if args.pipelineJson:
        with open(args.pipelineJson) as jsonFile:
          pipelineParams = json.load(jsonFile)

        pipelineParams["integration_test"] = {"testStatus": successStatus}
        with open(args.pipelineJson, "w") as jsonFile:
          jsonFile.write(json.dumps(pipelineParams, ensure_ascii=False))

      if successStatus:
        postTestRunAction(instanceId, terminate=True, **g_config)
      else:
        postTestRunAction(instanceId, terminate=False, **g_config)
        raise TestsFailed("Integration test failed")
Example #18
def addAndParseArgs(jsonArgs):
    """
    Parse the command line arguments.

    :returns : pipeline, buildWorkspace, grokSha, pipelineParams.
  """
    parser = argparse.ArgumentParser(description="Test tool to run tests for "
                                     "Grok. Provide parameters either via a "
                                     "path to a JSON file or the command "
                                     "line. Providing both is prohibited. "
                                     "Use --help for detailed information "
                                     "on parameters")
    parser.add_argument("--trigger-pipeline",
                        dest="pipeline",
                        type=str,
                        help="Repository name which triggered this pipeline",
                        choices=["grok"])
    parser.add_argument("--build-workspace",
                        dest="buildWorkspace",
                        type=str,
                        help="Common dir prefix for grok")
    parser.add_argument("--grok-sha",
                        dest="grokSha",
                        type=str,
                        help="SHA from Grok used for this build")
    parser.add_argument(
        "--pipeline-json",
        dest="pipelineJson",
        type=str,
        help="Path locator for build json file. This file should "
        "have all parameters required by this script. Provide "
        "parameters either as a command line parameters or as "
        "individial parameters")
    parser.add_argument(
        "--log",
        dest="logLevel",
        type=str,
        default="warning",
        help="Logging level, optional parameter and defaulted to "
        "level warning")

    args = {}
    if jsonArgs:
        args = jsonArgs
    else:
        args = vars(parser.parse_args())

    global g_logger
    g_logger = initPipelineLogger("run_tests", logLevel=args["logLevel"])

    g_logger.debug(args)
    saneParams = {k: v for k, v in args.items() if v is not None}

    del saneParams["logLevel"]

    if "pipelineJson" in saneParams and len(saneParams) > 1:
        parser.error("Please provide parameters via JSON file or commandline,"
                     "but not both")

    if "pipelineJson" in saneParams:
        with open(args["pipelineJson"]) as paramFile:
            pipelineParams = json.load(paramFile)
    else:
        pipelineParams = saneParams

    pipeline = pipelineParams.get(
        "pipeline",
        pipelineParams.get("manifest", {}).get("pipeline"))
    buildWorkspace = os.environ.get(
        "BUILD_WORKSPACE",
        pipelineParams.get(
            "buildWorkspace",
            pipelineParams.get("manifest", {}).get("buildWorkspace")))
    grokSha = pipelineParams.get(
        "grokSha",
        pipelineParams.get("build", {}).get("grokSha"))

    if pipeline and buildWorkspace and grokSha:
        return (pipeline, buildWorkspace, grokSha, pipelineParams,
                args["pipelineJson"])
    else:
        parser.error("Please provide all parameters, "
                     "use --help for further details")
Exemplo n.º 19
0
def parseArgs():
  """
    Parse the command line arguments

    :return: Parsed arguments
    :rtype: argparse.Namespace
  """
  parser = argparse.ArgumentParser(description="RPM Creator")
  parser.add_argument("--source-dir", action="store", dest="source_dir")
  parser.add_argument("--YOMP-url",
                      action="store",
                      dest="YOMPURL",
                      help="YOMP repository to package")
  parser.add_argument("--depends",
                      action="store",
                      dest="depends",
                      help="comma separated dependency list",
                      default=None)
  parser.add_argument("--package-name",
                      action="store",
                      dest="package_name",
                      help="rpm package name - don't include SHA or version")
  parser.add_argument("--repo-directory",
                      action="store",
                      dest="repo_directory",
                      help="name you want repo checked out as")
  parser.add_argument("--install-directory",
                      action="store",
                      dest="install_directory",
                      default='/opt',
                      help="where to install on target systems - default /opt")
  parser.add_argument("--sha", action="store", dest="sha", default=None)
  parser.add_argument("--base-version",
                      action="store",
                      dest="base_version",
                      default="0.1")
  parser.add_argument("--debug", action="store", dest="debug", default=0)
  parser.add_argument("--epoch", action="store", dest="epoch", default=0)
  parser.add_argument("--no-clean",
                      action="store",
                      dest="no_clean",
                      default=None)
  parser.add_argument("--arch", action="store", dest="arch", default=None)
  parser.add_argument("--desc",
                      action="store",
                      nargs='+',
                      dest="desc",
                      default=None)
  parser.add_argument("--directory-purge-list",
                      action="store",
                      dest="directory_purge_list",
                      default=None)
  parser.add_argument("--timestamp", action="store", dest="timestamp")
  parser.add_argument("--use-YOMP-tags", action="store", dest="useGitTag",
                      help="read version data from the repo's YOMP tags")
  parser.add_argument("--release", action="store", dest="release", default=0)
  parser.add_argument("--rpm-directory",
                      action="store",
                      dest="rpm_directory",
                      help="directory to put output rpm in")
  parser.add_argument("--workdir", action="store", dest="work_dir",
                      default="/opt/numenta/scratch",
                      help="The directory you want fpm to do its work in, where"
                           "'work' is any filecopying, downloading, etc."
                           "Roughly any scratch space fpm needs to build "
                           "your package.")
  parser.add_argument("--after-install", action="store",
                      dest="after_install", default=None,
                      help="post install script after rpm is installed")
  parser.add_argument("--log", dest="logLevel", type=str, default="debug",
                      help="Logging level")
  args = parser.parse_args()

  global g_logger
  # Initializing logger
  g_logger = log.initPipelineLogger("rpm-creator", logLevel=args.logLevel)

  if (not args.YOMPURL) and (not args.source_dir):
    parser.error("You must specify a repo to clone with --YOMP-url, or a "
                 "source directory with --source-dir")
  if not args.install_directory:
    parser.error("Specify a directory to install the repo into with "
                 "--install-directory, e.g. /opt")
  if not args.repo_directory:
    parser.error("Please specify a base directory with --repo-directory, "
                 "e.g. YOMP")
  if not args.package_name:
    parser.error("You must specify a package name with --package-name")
  if not args.base_version and not args.useGitTag:
    parser.error("Either specify a base version or --use-YOMP-tags to load "
                 "version data from the repo's YOMP tags")
  if args.YOMPURL and args.source_dir:
    parser.error("--YOMP-url and --source-dir are incompatible "
                 "with each other")
  if args.useGitTag and args.source_dir:
    parser.error("--use-YOMP-tags and --source-dir are incompatible "
                 "with each other")
  if args.timestamp:
    VERSION_TIMESTAMP = args.timestamp
  else:
    VERSION_TIMESTAMP = time.strftime("%Y%m%d.%H.%M.%S")

  return args
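
When --timestamp is not given, the version timestamp is derived from the
current time with time.strftime; for example:

import time

print(time.strftime("%Y%m%d.%H.%M.%S"))  # e.g. "20150318.16.42.07"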