def main():
    """main"""

    log_level_parser = log_level_args.get_parser()

    parser = argparse.ArgumentParser(
        description="""Analyze downloaded Test Log files, producing json summaries.
""",
        formatter_class=ArgumentFormatter,
        epilog="""

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.

""",
        parents=[log_level_parser],
        fromfile_prefix_chars='@')

    parser.add_argument("--path", required=True, help="Log.")

    parser.add_argument("--filename",
                        default="live_backing.log",
                        help="Base log filename suffix.")

    parser.add_argument("--include-tests",
                        action='store_true',
                        default=False,
                        help="Include TEST- lines.")

    parser.add_argument("--dechunk",
                        action='store_true',
                        default=False,
                        help="Combine chunks.")

    parser.add_argument("--raw",
                        action='store_true',
                        default=False,
                        help="Do not reformat/indent json.")

    parser.set_defaults(func=analyze_logs)

    args = parser.parse_args()

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    data = args.func(args)

    if args.raw:
        json.dump(data, sys.stdout)
    else:
        json.dump(data, sys.stdout, indent=2, sort_keys=True)
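
# A hedged usage sketch (hypothetical script and file names). The epilog above
# describes argparse's fromfile_prefix_chars='@' convention: each argument and
# its value sit on separate lines of the argfile.
#
#   $ cat analyze.args
#   --path
#   ./logs
#   --dechunk
#   $ python analyze_logs.py @analyze.args --raw > summary.json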
def main():
    parent_parsers = [
        log_level_args.get_parser(),
        pushes_args.get_parser(),
        treeherder_urls_args.get_parser(),
        activedata_urls_args.get_parser()
    ]

    additional_descriptions = [
        parser.description for parser in parent_parsers if parser.description
    ]
    additional_epilogs = [
        parser.epilog for parser in parent_parsers if parser.epilog
    ]

    parser = argparse.ArgumentParser(description="""ActiveData compare-tests

%s
""" % '\n\n'.join(additional_descriptions),
                                     formatter_class=ArgumentFormatter,
                                     epilog="""
%s

You can save a set of arguments to a file and specify them later
using the @argfile syntax. The arguments contained in the file will
replace @argfile in the command line. Multiple files can be loaded
into the command line through the use of the @ syntax. Each argument
and its value must be on separate lines in the file.
""" % '\n\n'.join(additional_epilogs),
                                     parents=parent_parsers,
                                     fromfile_prefix_chars='@')

    parser.add_argument("--combine-chunks",
                        action="store_true",
                        default=False,
                        help="Combine chunks")

    parser.add_argument(
        "--output-push-differences-only",
        action="store_true",
        default=False,
        help="""When loading multiple pushes, only output keys which have
        different values for sub_keys across the pushes.""")

    parser.set_defaults(func=compare_tests)

    args = parser.parse_args()

    init_treeherder(args.treeherder_url)

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    args.func(args)
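
# A minimal sketch of the parent-parser pattern used above, assuming each
# *_args.get_parser() returns a parser built with add_help=False so it can be
# composed via parents=[...] without a duplicate -h option. The option name and
# choices are inferred from getattr(logging, args.log_level) elsewhere in this
# file; the real module may differ.
def _sketch_log_level_parser():
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--log-level',
                        default='INFO',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                        help='Logging level.')
    return parser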
def main():
    parent_parsers = [log_level_args.get_parser(),
                      activedata_urls_args.get_parser()]

    additional_descriptions = [parser.description for parser in parent_parsers
                               if parser.description]
    additional_epilogs = [parser.epilog for parser in parent_parsers if parser.epilog]

    parser = argparse.ArgumentParser(
        description="""Query ActiveData tests and write the result as json to stdout.

%s
""" % '\n\n'.join(additional_descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later
using the @argfile syntax. The arguments contained in the file will
replace @argfile in the command line. Multiple files can be loaded
into the command line through the use of the @ syntax. Each argument
and its value must be on separate lines in the file.
""" % '\n\n'.join(additional_epilogs),
        parents=parent_parsers,
        fromfile_prefix_chars='@'
    )

    parser.add_argument("--file",
                        required=True,
                        help="File containing ActiveData query as json..")

    parser.add_argument(
        "--raw",
        action='store_true',
        default=False,
        help="Do not reformat/indent json.")

    parser.set_defaults(func=query)

    args = parser.parse_args()

    logging.basicConfig(level=getattr(logging, args.log_level))

    activedata_json = args.func(args)

    if args.raw:
        json.dump(activedata_json, sys.stdout)
    else:
        json.dump(activedata_json, sys.stdout, indent=2)
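
# A hypothetical query file for --file, written in ActiveData's json query
# language. The field names below are illustrative assumptions, not a fixed
# schema:
#
#   {
#     "from": "unittest",
#     "where": {"eq": {"repo.branch.name": "mozilla-central"}},
#     "limit": 10
#   }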
def main():
    global logger

    log_level_parser = log_level_args.get_parser()

    parser = argparse.ArgumentParser(
        description="""Analyze Log Summary.

Consumes the output of analyze_log_summary.py to produce either a detailed report
or a summary report in csv format.

The detailed report contains the taskcluster_runtime and every field contained in
the PERFHERDER_DATA from the original log file.

The summary report contains the average and standard deviations of the PERFHERDER_DATA.
""",
        formatter_class=ArgumentFormatter,
        epilog="""You can save a set of arguments to a file and specify them later
using the @argfile syntax. The arguments contained in the file will
replace @argfile in the command line. Multiple files can be loaded
into the command line through the use of the @ syntax. Each argument
and its value must be on separate lines in the file.""",
        parents=[log_level_parser],
        fromfile_prefix_chars='@'
    )

    parser.add_argument('--file',
                        default=None,
                        required=True,
                        help='Path to summarized log file to analyze.')
    parser.add_argument('--report',
                        default='detailed',
                        choices=['detailed', 'summary'],
                        help='Choose the type of report to be generated. "detailed" will output '
                        'each measurement while "summary" will calculate the mean and sample '
                        'standard deviation of the measurements.')

    args = parser.parse_args()

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()

    data = load_json_data(args.file)
    measurements = extract_measurements(data)
    if args.report == 'detailed':
        generate_detailed_report(measurements)
    else:
        generate_summary_report(measurements)
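
# A minimal sketch of the statistics behind --report=summary, which the help
# text describes as the mean and sample standard deviation of the measurements.
# generate_summary_report itself is defined elsewhere; this only shows the
# math assumed:
import statistics

def _summarize_measurement(values):
    """Return (mean, sample standard deviation) for a list of measurements."""
    return statistics.mean(values), statistics.stdev(values)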
def main():
    global logger

    log_level_parser = log_level_args.get_parser()

    parser = argparse.ArgumentParser(
        description="""Analyze Log Summary.

Consumes the output of combine_log_summaries.py to produce a summary
report in csv format comparing the different summaries in the
combination.

The summary report contains the average and standard deviations of the PERFHERDER_DATA.
""",
        formatter_class=ArgumentFormatter,
        epilog=
        """You can save a set of arguments to a file and specify them later
using the @argfile syntax. The arguments contained in the file will
replace @argfile in the command line. Multiple files can be loaded
into the command line through the use of the @ syntax. Each argument
and its value must be on separate lines in the file.""",
        parents=[log_level_parser],
        fromfile_prefix_chars='@')

    parser.add_argument('--file',
                        default=None,
                        required=True,
                        help='Path to summarized log file to analyze.')

    args = parser.parse_args()

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()

    data = load_json_data(args.file)
    measurements = extract_measurements(data)
    aliases = extract_aliases(data)
    generate_report(aliases, measurements)
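
# A hedged sketch of extract_aliases (defined elsewhere), assuming the input
# json carries the {"aliases": {...}, "combined": {...}} shape produced by the
# combiner at the end of this file:
def _sketch_extract_aliases(data):
    return data.get("aliases", {})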
def main():
    """main"""

    parent_parsers = [
        log_level_args.get_parser(),
        treeherder_urls_args.get_parser(),
        pushes_args.get_parser(),
        jobs_args.get_parser(),
    ]

    additional_descriptions = [
        parser.description for parser in parent_parsers if parser.description
    ]
    additional_epilogs = [
        parser.epilog for parser in parent_parsers if parser.epilog
    ]

    parser = argparse.ArgumentParser(description="""
Downloads pushes and jobs data from Treeherder, writing results as nested json to
stdout.

%s

""" % '\n\n'.join(additional_descriptions),
                                     formatter_class=ArgumentFormatter,
                                     epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.

""" % '\n\n'.join(additional_epilogs),
                                     parents=parent_parsers,
                                     fromfile_prefix_chars='@')

    parser.add_argument(
        '--cache',
        default='~/cia_tools_cache/',
        help='Directory used to store cached objects retrieved from Bugzilla '
        'and Treeherder.')

    parser.add_argument('--update-cache',
                        default=False,
                        action='store_true',
                        help='Recreate cached files with fresh data.')

    parser.add_argument('--dump-cache-stats',
                        action='store_true',
                        default=False,
                        help='Dump cache statistics to stderr.')

    parser.add_argument("--raw",
                        action='store_true',
                        default=False,
                        help="Do not reformat/indent json.")

    parser.set_defaults(func=get_pushes_jobs_json)

    args = parser.parse_args()

    args.cache = os.path.expanduser(args.cache)

    if not os.path.isdir(args.cache):
        os.makedirs(args.cache)
    cache.CACHE_HOME = args.cache

    init_treeherder(args.treeherder_url)

    if args.revision_url:
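        # e.g. a url such as 'https://hg.mozilla.org/mozilla-central/rev/<hash>'
        # (hypothetical) splits into repo='mozilla-central', revision='<hash>'.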
        (args.repo, _, args.revision) = args.revision_url.split('/')[-3:]

    pushes_args.compile_filters(args)
    jobs_args.compile_filters(args)

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    pushes = args.func(args, args.repo)

    if args.raw:
        json.dump(pushes, sys.stdout)
    else:
        json.dump(pushes, sys.stdout, indent=2)

    if args.dump_cache_stats:
        cache.stats()
def main():
    """main"""
    global logger

    parent_parsers = [
        log_level_args.get_parser(),
        treeherder_urls_args.get_parser(),
        pushes_args.get_parser(),
        jobs_args.get_parser(),
    ]

    additional_descriptions = [
        parser.description for parser in parent_parsers if parser.description
    ]
    additional_epilogs = [
        parser.epilog for parser in parent_parsers if parser.epilog
    ]

    parser = argparse.ArgumentParser(description="""
Analyze pushes from bugs marked with whiteboard [test isolation] or a
value specified from the command options.

Queries Bugzilla for bugs marked with [test isolation] in the whiteboard,
determines the bug number, bug summary and revision from the bug then reads
push and job data from Treeherder and produces a summary of runtimes and
test failures, writing results as either csv text or json to stdout. By
default, output is written as formatted json.

Intermediate results are stored in a cache directory to be re-used on subsequent
runs. When changing options, it is safest to delete the cache directory and
start over.

%s

""" % '\n\n'.join(additional_descriptions),
                                     formatter_class=ArgumentFormatter,
                                     epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.

""" % '\n\n'.join(additional_epilogs),
                                     parents=parent_parsers,
                                     fromfile_prefix_chars='@')

    parser.add_argument(
        '--whiteboard',
        default='[test isolation]',
        help='Bugzilla whiteboard value used to select the appropriate bugs. '
        'Should only be used with --bug.')

    parser.add_argument(
        '--override-bug-summary',
        default=None,
        help='When reprocessing a bug with a problematic bug summary '
        'or when using --whiteboard to select a bug not filed by '
        'intermittent-bug-filer, specify an override bug summary '
        'to mimic an intermittent bug summary to be used to determine '
        'if a failure or test is reproduced. Otherwise the original bug '
        'summary will be used. Should only be used with --bug.')

    parser.add_argument(
        '--cache',
        default='~/cia_tools_cache/',
        help='Directory used to store cached objects retrieved from Bugzilla '
        'and Treeherder.')

    parser.add_argument('--update-cache',
                        default=False,
                        action='store_true',
                        help='Recreate cached files with fresh data.')

    parser.add_argument('--dump-cache-stats',
                        action='store_true',
                        default=False,
                        help='Dump cache statistics to stderr.')

    parser.add_argument(
        '--bug-creation-time',
        help='Starting creation time in YYYY-MM-DD or '
        'YYYY-MM-DDTHH:MM:SSTZ format. '
        'Example 2019-07-27T17:28:00PDT or 2019-07-28T00:28:00Z',
        default='2019-06-01T00:00:00Z')

    parser.add_argument(
        '--bugs-after',
        type=int,
        help='Only returns bugs whose id is greater than this integer.',
        default=None)

    parser.add_argument('--bug',
                        dest='bugs',
                        type=int,
                        action='append',
                        default=[],
                        help='Only return results for the specified bug.')

    parser.add_argument('--raw',
                        action='store_true',
                        default=False,
                        help='Do not reformat/indent json.')

    parser.add_argument(
        '--csv-summary',
        action='store_true',
        default=False,
        help=
        'Output summary data in csv format. Does not include individual failures or tests.'
    )

    parser.add_argument(
        '--csv-results',
        action='store_true',
        default=False,
        help=
        'Output test data in csv format. Does not include individual failures.'
    )

    parser.add_argument('--include-failures',
                        action='store_true',
                        default=False,
                        help='Include individual failures in output.')

    parser.add_argument('--include-tests',
                        action='store_true',
                        default=False,
                        help='Include individual tests in output.')

    parser.set_defaults(func=summarize_isolation_pushes_jobs_json)

    args = parser.parse_args()

    args.cache = os.path.expanduser(args.cache)

    pushes_args.compile_filters(args)
    jobs_args.compile_filters(args)

    if args.test_failure_pattern:
        args.test_failure_pattern = re.compile(args.test_failure_pattern)

    if ('test isolation' not in args.whiteboard
            or args.override_bug_summary) and not args.bugs:
        parser.error('--bug must be specified if either --whiteboard or '
                     '--override-bug-summary is specified.')

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug('main %s', args)

    if not os.path.isdir(args.cache):
        os.makedirs(args.cache)
    cache.CACHE_HOME = args.cache

    init_treeherder(args.treeherder_url)

    summary = args.func(args)

    if args.raw:
        json.dump(summary, sys.stdout)
    elif args.csv_summary:
        output_csv_summary(args, summary)
    elif args.csv_results:
        output_csv_results(args, summary)
    else:
        json.dump(summary, sys.stdout, indent=2)

    if args.dump_cache_stats:
        cache.stats()
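
# Example invocation (hypothetical script name; push-selection arguments from
# the parent parsers are elided behind an @argfile): summarize one isolation
# bug as csv using only options defined above.
#
#   $ python test_isolation.py @push.args --bug <bug-id> --csv-summary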
def main():
    """main"""

    parent_parsers = [
        log_level_args.get_parser(),
        pushes_args.get_parser(),
        jobs_args.get_parser(),
        treeherder_urls_args.get_parser()
    ]

    additional_descriptions = [
        parser.description for parser in parent_parsers if parser.description
    ]
    additional_epilogs = [
        parser.epilog for parser in parent_parsers if parser.epilog
    ]

    parser = argparse.ArgumentParser(
        description="""Download Job Details files from Treeherder/Taskcluster.

--download-job-details specifies a regular expression which will be matched
against the base file name of the url to the file to select the files to be
downloaded. This is not a shell glob pattern, but a full regular expression.
Files will be saved to the output directory using the path to the job detail
and a file name encoded with metadata as:

output/revision/job_guid/job_guid_run/path/platform,buildtype,job_name,job_type_symbol,filename

If --alias is specified, a soft link will be created from
output/revision to output/alias.

%s

""" % '\n\n'.join(additional_descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.

""" % '\n\n'.join(additional_epilogs),
        parents=parent_parsers,
        fromfile_prefix_chars='@')

    parser.add_argument(
        "--download-job-details",
        dest="download_job_details",
        default=None,
        required=True,
        help="""Regular expression matching Job details url basenames to be
        downloaded.  Example:live_backing.log|logcat.*.log. Default
        None.""")

    parser.add_argument(
        '--cache',
        default='~/cia_tools_cache/',
        help='Directory used to store cached objects retrieved from Bugzilla '
        'and Treeherder.')

    parser.add_argument('--update-cache',
                        default=False,
                        action='store_true',
                        help='Recreate cached files with fresh data.')

    parser.add_argument('--dump-cache-stats',
                        action='store_true',
                        default=False,
                        help='Dump cache statistics to stderr.')

    parser.add_argument("--output",
                        dest="output",
                        default="output",
                        help="Directory where to save downloaded job details.")

    parser.add_argument(
        "--alias",
        dest="alias",
        default=None,
        help=
        "Alias (soft link) to revision subdirectory where the downloaded job details were saved."
    )

    parser.set_defaults(func=download_treeherder_job_details)

    args = parser.parse_args()

    args.cache = os.path.expanduser(args.cache)

    if not os.path.isdir(args.cache):
        os.makedirs(args.cache)
    cache.CACHE_HOME = args.cache

    init_treeherder(args.treeherder_url)

    if args.revision_url:
        (args.repo, _, args.revision) = args.revision_url.split('/')[-3:]

    pushes_args.compile_filters(args)
    jobs_args.compile_filters(args)

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    args.func(args)

    if args.dump_cache_stats:
        cache.stats()
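
# Example invocation (hypothetical script name; push-selection arguments from
# the parent parsers are elided behind an @argfile), using the regular
# expression semantics of --download-job-details described above:
#
#   $ python download_job_details.py @push.args \
#       --download-job-details 'live_backing\.log|logcat.*\.log' \
#       --output output --alias baseline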
def main():
    global logger

    log_level_parser = log_level_args.get_parser()

    parser = argparse.ArgumentParser(
        description="""Combine analyzed Test Log json files.
""",
        formatter_class=ArgumentFormatter,
        epilog=
        """You can save a set of arguments to a file and specify them later
using the @argfile syntax. The arguments contained in the file will
replace @argfile in the command line. Multiple files can be loaded
into the command line through the use of the @ syntax. Each argument
and its value must be on separate lines in the file.""",
        parents=[log_level_parser],
        fromfile_prefix_chars='@')

    parser.add_argument('--file',
                        dest='files',
                        action='append',
                        default=[],
                        help='Analyzed Test Log json file to combine. '
                        'May be repeated.')

    parser.add_argument('--alias',
                        dest='aliases',
                        action='append',
                        default=[],
                        help='key:alias mapping naming the data from the '
                        'corresponding --file. May be repeated.')

    parser.add_argument('--differences',
                        action='store_true',
                        default=False,
                        help="Output only differences in data.")

    parser.add_argument(
        '--ignore',
        default=None,
        help=
        "Ignore keys matching regular expression when calculating differences."
    )

    parser.add_argument(
        '--munge-test-data',
        action='store_true',
        default=False,
        help="Modify TEST- lines in output to improve comparibility.")

    args = parser.parse_args()

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()

    combined_data = {"aliases": {}, "combined": {}}

    alias_list = []
    for aliasmap in args.aliases:
        (key, alias) = aliasmap.split(':')
        alias_list.append(alias)
        combined_data["aliases"][key] = alias

    if args.ignore:
        re_ignore = re.compile(args.ignore)
    else:
        re_ignore = None

    for input_file_path in args.files:
        with open(input_file_path) as input_file:
            input_json = json.load(input_file)
            for key in input_json.keys():
                data = input_json[key]
                alias_key = combined_data["aliases"][key]

                sub_keys = data.keys()

                for sub_key in sub_keys:
                    if args.munge_test_data and 'test_data' in data[sub_key]:
                        munge_test_data(data[sub_key]['test_data'])

                    if sub_key not in combined_data["combined"]:
                        combined_data["combined"][sub_key] = {}
                    combined_data["combined"][sub_key][alias_key] = data[
                        sub_key]

    if not args.differences:
        output_data = combined_data
    else:
        output_data = compare_aliases(re_ignore, alias_list, combined_data)

    json.dump(output_data, sys.stdout, indent=2, sort_keys=True)
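
# Example invocation (hypothetical file names), pairing each --file with a
# key:alias mapping as consumed by aliasmap.split(':') above. rev1 and rev2
# stand for the top-level keys of the corresponding input files:
#
#   $ python combine_log_summaries.py \
#       --file before.json --alias rev1:before \
#       --file after.json --alias rev2:after \
#       --differences > diff.json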