Example 1
def postTestRunAction(instanceId, terminate=True, **config):
  """
    Terminate or stop the instance once the test run has completed. The
    caller passes terminate=False when the tests fail, so the instance is
    only stopped and stays available for inspection; otherwise it is
    terminated.

    :param instanceId: instance ID of the launched instance
    :param terminate: if True, terminate the instance, else stop it
    :param config: dict of config values
  """
  if terminate:
    g_logger.info("Terminating instance %s", instanceId)
    terminateInstance(instanceId, config, g_logger)
  else:
    g_logger.info("Stopping instance %s", instanceId)
    stopInstance(instanceId, config, g_logger)
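
The terminateInstance and stopInstance helpers called above are defined elsewhere in the pipeline and are not part of this example. As a rough, hypothetical sketch of what such helpers could look like (written with boto3 purely for illustration; the real helpers and the keys expected in config may differ):

# Hypothetical sketch only -- not the pipeline's actual helpers. Assumes the
# config dict carries AWS credentials and a REGION key.
import boto3


def terminateInstance(instanceId, config, logger):
  ec2 = boto3.client("ec2",
                     region_name=config.get("REGION", "us-west-2"),
                     aws_access_key_id=config["AWS_ACCESS_KEY_ID"],
                     aws_secret_access_key=config["AWS_SECRET_ACCESS_KEY"])
  ec2.terminate_instances(InstanceIds=[instanceId])
  logger.info("Termination request sent for %s", instanceId)


def stopInstance(instanceId, config, logger):
  ec2 = boto3.client("ec2",
                     region_name=config.get("REGION", "us-west-2"),
                     aws_access_key_id=config["AWS_ACCESS_KEY_ID"],
                     aws_secret_access_key=config["AWS_SECRET_ACCESS_KEY"])
  ec2.stop_instances(InstanceIds=[instanceId])
  logger.info("Stop request sent for %s", instanceId)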
Example 3
def main():
  """
    Main entry point: launch an instance from the given AMI, run the
    integration tests on it, fetch the results, and terminate or stop the
    instance depending on the outcome.
  """
  args = parseArgs()

  global g_logger
  g_logger = initPipelineLogger("run-integration-tests", logLevel=args.logLevel)

  if not (os.environ.get("AWS_ACCESS_KEY_ID") and
          os.environ.get("AWS_SECRET_ACCESS_KEY")):
    g_logger.error("Please set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
    raise MissingAWSKeysInEnvironment("AWS keys are not set")
  else:
    g_config["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
    g_config["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]

  # Launch an instance with the given AMI
  publicDnsName, instanceId = launchInstance(args.ami, g_config, g_logger)

  resultsDir = prepareResultsDir()
  serverKey = os.path.join("~", ".ssh", g_config["KEY"] + ".pem")

  # The calls in this function are not signal-safe. However, the expectation is
  # that making them signal safe would be overly burdensome at this time. If
  # issues arise later, then we'll figure out what the right approach is at that
  # time.
  def handleSignalInterrupt(signal, _frame):
    g_logger.error("Received interrupt signal %s", signal)
    if instanceId:
      g_logger.error("Terminating instance %s", instanceId)
      terminateInstance(instanceId, g_config, g_logger)

  signal.signal(signal.SIGINT, handleSignalInterrupt)
  signal.signal(signal.SIGTERM, handleSignalInterrupt)

  with settings(host_string=publicDnsName,
                key_filename=serverKey,
                user=g_config["USER"], connection_attempts=30, warn_only=True):
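    # Fabric context: run() and get() below execute over SSH on the launched
    # instance; warn_only lets a failing remote command return a result
    # instead of aborting the whole script.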
    g_logger.info("Connected to %s using %s", publicDnsName, serverKey)
    # Run Integration tests
    try:
      waitForGrokServerToBeReady(publicDnsName, serverKey, g_config["USER"],
                                 g_logger)
      getApiKey(instanceId, publicDnsName, g_config, g_logger)
      # TODO remove the exports; keeping them intact for now because some of the
      # integration tests use the ConfigAttributePatch which reads these values
      # from environment.
      runTestCommand = ("export AWS_ACCESS_KEY_ID=%s"
                        % os.environ["AWS_ACCESS_KEY_ID"] +
                        " && export AWS_SECRET_ACCESS_KEY=%s"
                        % os.environ["AWS_SECRET_ACCESS_KEY"] +
                        " && source /etc/grok/supervisord.vars" +
                        " && cd $GROK_HOME" +
                        " && ./run_tests.sh --integration --language py" +
                        " --results xunit jenkins")
      run(runTestCommand)
      g_logger.debug("Retrieving results")
      get(g_remotePath, resultsDir)
    except Exception:
      g_logger.exception("Caught exception in run_tests")
      stopInstance(instanceId, g_config, g_logger)
      raise
    else:
      g_logger.info("Tests have finished.")

      # Rename the results file to be job specific
      newResultsFile = "grok_integration_test_results_%s.xml" % getBuildNumber()
      if os.path.exists(os.path.join(resultsDir, "results.xml")):
        shutil.move(os.path.join(resultsDir, "results.xml"),
                    os.path.join(resultsDir, newResultsFile))
      if os.path.exists(os.path.join(resultsDir, newResultsFile)):
        successStatus = analyzeResults("%s/%s" % (resultsDir, newResultsFile))
      else:
        g_logger.error("Could not find results file: %s", newResultsFile)
        successStatus = False

      if args.pipelineJson:
        with open(args.pipelineJson) as jsonFile:
          pipelineParams = json.load(jsonFile)

        pipelineParams["integration_test"] = {"testStatus": successStatus}
        with open(args.pipelineJson, "w") as jsonFile:
          jsonFile.write(json.dumps(pipelineParams, ensure_ascii=False))

      if successStatus:
        postTestRunAction(instanceId, terminate=True, **g_config)
      else:
        postTestRunAction(instanceId, terminate=False, **g_config)
        raise TestsFailed("Integration test failed")
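
Several names used in Example 3 (g_config, g_remotePath, getBuildNumber, analyzeResults, the custom exceptions, and the Fabric settings/run/get helpers) come from the surrounding pipeline module and are not shown. As one plausible, simplified reading of analyzeResults, assuming the results file is standard xunit XML; the project's real implementation may differ:

# Hypothetical sketch of analyzeResults: the run is considered successful
# when the xunit report contains no failures or errors.
import xml.etree.ElementTree as ET


def analyzeResults(resultsPath):
  root = ET.parse(resultsPath).getroot()
  # xunit output may be a single <testsuite> or a <testsuites> wrapper.
  suites = [root] if root.tag == "testsuite" else root.findall("testsuite")
  failures = sum(int(suite.get("failures", 0)) for suite in suites)
  errors = sum(int(suite.get("errors", 0)) for suite in suites)
  return failures == 0 and errors == 0


# The custom exceptions only need to exist; minimal definitions would be:
class MissingAWSKeysInEnvironment(Exception):
  """Raised when AWS credentials are missing from the environment."""


class TestsFailed(Exception):
  """Raised when the integration test run does not pass."""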