Example #1
This example shows the Grinder Analyzer main() entry point: it validates the command-line arguments, builds the list of data_* files, and drives HTML report and graph generation. Module-level names such as logger, CONFIG, tail, VORPAL, SummaryDataRegistry, ClientLogAnalyzer, and the ga.graph/ga.report helpers come from the surrounding project; sys is the standard library module, imported at module level.
def main():
    # validate the parameters
    if len(sys.argv) < 3:
        usage()
    assertCurrentJython()
    logger.info(CONFIG)
    grinderData = sys.argv[1]
    grinderDataFiles = []
    grinderOutFile = sys.argv[2]
    if len(grinderOutFile.split()) > 1:
        logger.fatal("FATAL: grinder analyzer only supports using a single out_* file")
        usage()
    agents = 1
    if len(sys.argv) < 4:
        logger.warn("Optional TPS multiplier not set.  Using 1")
    else:
        agents = int(sys.argv[3])

    # build the list of grinder data files specified on the command line
    for grinderDataFile in grinderData.strip().split(" "):
        if grinderDataFile == "":
            continue
        if len(tail(grinderDataFile, CONFIG.buckets + 1, ignoreBlankLines=True)) < CONFIG.buckets:
            logger.fatal("")
            logger.fatal("FATAL: insufficient test data to graph.  conf/analyzer.properties specifies")
            logger.fatal("       " + str(CONFIG.buckets) + " buckets, but " + grinderDataFile + " contains")
            logger.fatal("       less than " + str(CONFIG.buckets) + " data points.")
            sys.exit(1)
        # validate the file name before accepting it
        if grinderDataFile.rfind("data_") < 0:
            logger.fatal("Invalid grinder data file: " + grinderDataFile)
            usage()
        grinderDataFiles.append(grinderDataFile)
    logger.info("Grinder data files specified: %d" % grinderDataFiles.__len__())
    if grinderOutFile.rfind("out_") < 0:
        logger.fatal("Invalid grinder out file: " + grinderOutFile)
        usage()
    summaryData = SummaryDataRegistry(grinderOutFile)

    # generate HTML report
    reporter = ga.report.getReporter(grinderOutFile, summaryData)
    reporter.readGrinderOutFile(summaryData)

    # generate the graphs
    analyzer = ClientLogAnalyzer(grinderDataFiles, summaryData, agents)
    transactions = analyzer.getTransactionMap()
    for transactionNumber in transactions.keys():
        datasets = analyzer.getDataSets(transactionNumber)
        transactionName = transactions[transactionNumber]
        perfDatasets = [datasets[TX_SEC_KEY], datasets[SIMPLE_RESPONSE_TIME_KEY]]
        perfGrapher = ga.graph.PerformanceGrapher(perfDatasets, transactionName, TEST_START_TIME)
        perfGrapher.saveChartToDisk(CONFIG.reportDir)
        # BAIL HERE if non-http
        if not VORPAL.getPlugin("analyzer").isHTTP():
            continue
        bw = ga.graph.BandwidthGrapher([datasets[THROUGHPUT_KEY]], transactionName, TEST_START_TIME)
        bw.saveChartToDisk(CONFIG.reportDir)
        rtGrapher = ga.graph.ResponseTimeGrapher([datasets[FULL_RESPONSE_TIME_KEY]], transactionName, TEST_START_TIME)
        rtGrapher.saveChartToDisk(CONFIG.reportDir)
    reporter.writeReportToFile()
    logger.warn("Log file analysis completed successfully.")
Example #2
This method computes the test duration and the per-bucket width used when graphing. The grinderDataFile parameter is unused; the method iterates over self.dataFiles instead.
 def _setTestDuration_(self, grinderDataFile):
     """ Determine the duration of the test by scanning the tail of each
     data file for the largest elapsed-time value. """
     maxElapsedTime = long(0)
     elapsedTime = 0
     for dataFile in self.dataFiles:
         # grinder data is not guaranteed to be in chronological order,
         # so look at the last 200 lines to find the max elapsed-time value.
         for line in tail(dataFile, 200, ignoreBlankLines=True):
             # Trilok Khairnar's fix for data files containing < 200 lines
             elapsedTimeword = line.split(",")[ELAPSED_TIME_COLUMN].strip()
             if elapsedTimeword.find("tart") < 0:  # skip the header row in Grinder 3.0 and 3.1
                 elapsedTime = long(elapsedTimeword) - TEST_START_TIME
             if elapsedTime > maxElapsedTime:
                 maxElapsedTime = elapsedTime
     # compute the bucket width once all files have been scanned
     self.msPerBucket = maxElapsedTime / CONFIG.buckets + 1  # integer division; +1 rounds up
     # TODO -- make determining max time more bulletproof.  Perhaps detect file
     #        size and tail more lines for larger files.
     maxElapsedTime = long(maxElapsedTime * 1.01)  # add a 1% safety factor
     # elapsed times are already relative to TEST_START_TIME (see the loop
     # above), so no further subtraction is needed here
     self.maxElapsedTime = maxElapsedTime
     logger.debug("Max elapsed time == " + str(self.maxElapsedTime))