def main():
    """Entry point for the ActiveData compare-tests command.

    Builds an argument parser from the shared parent parsers, then runs
    the function selected via ``set_defaults`` (compare_tests).
    """
    parents = [
        log_level_args.get_parser(),
        pushes_args.get_parser(),
        treeherder_urls_args.get_parser(),
        activedata_urls_args.get_parser(),
    ]

    # Collect the description/epilog text contributed by the shared parsers.
    descriptions = [p.description for p in parents if p.description]
    epilogs = [p.epilog for p in parents if p.epilog]

    parser = argparse.ArgumentParser(
        description="""ActiveData compare-tests

%s
""" % '\n\n'.join(descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.
""" % '\n\n'.join(epilogs),
        parents=parents,
        fromfile_prefix_chars='@')

    parser.add_argument(
        "--combine-chunks",
        action="store_true",
        default=False,
        help="Combine chunks")

    parser.add_argument(
        "--output-push-differences-only",
        action="store_true",
        default=False,
        help="""When loading multiple pushes, only output keys which have
different values for sub_keys across the pushes.""")

    parser.set_defaults(func=compare_tests)

    args = parser.parse_args()

    init_treeherder(args.treeherder_url)

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    args.func(args)
def main():
    """Entry point: download pushes and jobs data from Treeherder and
    write the result as nested json to stdout."""
    parents = [
        log_level_args.get_parser(),
        treeherder_urls_args.get_parser(),
        pushes_args.get_parser(),
        jobs_args.get_parser(),
    ]

    # Text contributed by the shared parent parsers.
    descriptions = [p.description for p in parents if p.description]
    epilogs = [p.epilog for p in parents if p.epilog]

    parser = argparse.ArgumentParser(
        description="""
Downloads pushes and jobs data from Treeherder, writing results as
nested json to stdout.

%s
""" % '\n\n'.join(descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.
""" % '\n\n'.join(epilogs),
        parents=parents,
        fromfile_prefix_chars='@')

    parser.add_argument(
        '--cache',
        default='~/cia_tools_cache/',
        help='Directory used to store cached objects retrieved from Bugzilla '
        'and Treeherder.')

    parser.add_argument(
        '--update-cache',
        default=False,
        action='store_true',
        help='Recreate cached files with fresh data.')

    parser.add_argument(
        '--dump-cache-stats',
        action='store_true',
        default=False,
        help='Dump cache statistics to stderr.')

    parser.add_argument(
        "--raw",
        action='store_true',
        default=False,
        help="Do not reformat/indent json.")

    parser.set_defaults(func=get_pushes_jobs_json)

    args = parser.parse_args()

    # Expand ~ and make sure the cache directory exists before any
    # cached requests are made.
    args.cache = os.path.expanduser(args.cache)
    if not os.path.isdir(args.cache):
        os.makedirs(args.cache)
    cache.CACHE_HOME = args.cache

    init_treeherder(args.treeherder_url)

    if args.revision_url:
        # A revision url ends with .../<repo>/rev/<revision>.
        (args.repo, _, args.revision) = args.revision_url.split('/')[-3:]

    pushes_args.compile_filters(args)
    jobs_args.compile_filters(args)

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    pushes = args.func(args, args.repo)

    # indent=None is json.dump's default (no pretty-printing), so this is
    # equivalent to the raw/formatted branch pair.
    json.dump(pushes, sys.stdout, indent=None if args.raw else 2)

    if args.dump_cache_stats:
        cache.stats()
def main():
    """Entry point for the ActiveData query-tests command.

    Builds the argument parser from the shared parent parsers, runs the
    query function selected via ``set_defaults`` (query_tests) and writes
    the result as json to stdout.
    """
    parent_parsers = [
        log_level_args.get_parser(),
        pushes_args.get_parser(),
        treeherder_urls_args.get_parser(),
        activedata_urls_args.get_parser()
    ]

    additional_descriptions = [parser.description for parser in parent_parsers
                               if parser.description]
    additional_epilogs = [parser.epilog for parser in parent_parsers
                          if parser.epilog]

    parser = argparse.ArgumentParser(
        description="""ActiveData query tests.

Query ActiveData tests and write the result as json to stdout.
Errors will be written to stderr.

%s
""" % '\n\n'.join(additional_descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.
""" % '\n\n'.join(additional_epilogs),
        parents=parent_parsers,
        fromfile_prefix_chars='@')

    parser.add_argument(
        "--include-passing-tests",
        dest="include_passing_tests",
        action='store_true',
        default=False,
        # FIX: the previous help text ("Query tests against ActiveData.")
        # duplicated the command description and did not describe this flag;
        # describe what the option toggles (inferred from its dest name —
        # confirm against query_tests' use of include_passing_tests).
        help="Include passing tests in the results.")

    parser.add_argument(
        "--raw",
        action='store_true',
        default=False,
        help="Do not reformat/indent json.")

    parser.set_defaults(func=query_tests)

    args = parser.parse_args()

    init_treeherder(args.treeherder_url)

    logging.basicConfig(level=getattr(logging, args.log_level))
    # Log parsed arguments at debug level, consistent with the other
    # command entry points in this project.
    logger = logging.getLogger()
    logger.debug("main %s", args)

    tests = args.func(args)

    if args.raw:
        json.dump(tests, sys.stdout)
    else:
        json.dump(tests, sys.stdout, indent=2)
def main():
    """Entry point: analyze test isolation pushes.

    Queries Bugzilla for bugs marked with the configured whiteboard value,
    reads push and job data from Treeherder and writes a summary of
    runtimes and test failures as csv or json to stdout.
    """
    global logger

    parent_parsers = [
        log_level_args.get_parser(),
        treeherder_urls_args.get_parser(),
        pushes_args.get_parser(),
        jobs_args.get_parser(),
    ]

    additional_descriptions = [parser.description for parser in parent_parsers
                               if parser.description]
    additional_epilogs = [parser.epilog for parser in parent_parsers
                          if parser.epilog]

    parser = argparse.ArgumentParser(
        description="""
Analyze pushes from bugs marked with whiteboard [test isolation] or a
value specified from the command options.

Queries Bugzilla for bugs marked with [test isolation] in the whiteboard,
determines the bug number, bug summary and revision from the bug then reads
push and job data from Treeherder and produces a summary of runtimes and
test failures, writing results as either csv text or json to stdout.

By default, output is written as formatted json.

Intermediate results are stored in a cache directory to be re-used on
subsequent runs. When changing options, it is safest to delete the cache
directory and start over.

%s
""" % '\n\n'.join(additional_descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.
""" % '\n\n'.join(additional_epilogs),
        parents=parent_parsers,
        fromfile_prefix_chars='@')

    parser.add_argument(
        '--whiteboard',
        default='[test isolation]',
        help='Bugzilla whiteboard value used to select the appropriate bugs. '
        'Should only be used with --bug.')

    parser.add_argument(
        '--override-bug-summary',
        default=None,
        help='When reprocessing a bug with a problematic bug summary '
        'or when using --whiteboard to select a bug not filed by '
        'intermittent-bug-filer, specify an override bug summary '
        'to mimic an intermittent bug summary to be used to determine '
        'if a failure or test is reproduced. Otherwise the original bug '
        'summary will be used. Should only be used with --bug.')

    parser.add_argument(
        '--cache',
        default='~/cia_tools_cache/',
        help='Directory used to store cached objects retrieved from Bugzilla '
        'and Treeherder.')

    parser.add_argument(
        '--update-cache',
        default=False,
        action='store_true',
        help='Recreate cached files with fresh data.')

    parser.add_argument(
        '--dump-cache-stats',
        action='store_true',
        default=False,
        help='Dump cache statistics to stderr.')

    parser.add_argument(
        '--bug-creation-time',
        help='Starting creation time in YYYY-MM-DD or '
        'YYYY-MM-DDTHH:MM:SSTZ format. '
        'Example 2019-07-27T17:28:00PDT or 2019-07-28T00:28:00Z',
        default='2019-06-01T00:00:00Z')

    parser.add_argument(
        '--bugs-after',
        type=int,
        help='Only returns bugs whose id is greater than this integer.',
        default=None)

    # FIX: action='append' with a mutable default list makes argparse append
    # parsed values into the shared default object; use None and normalize
    # after parsing instead.
    parser.add_argument(
        '--bug',
        dest='bugs',
        type=int,
        action='append',
        default=None,
        # FIX: help text had a duplicated word ("for bug the specified bug").
        help='Only return results for the specified bug.')

    parser.add_argument(
        '--raw',
        action='store_true',
        default=False,
        help='Do not reformat/indent json.')

    parser.add_argument(
        '--csv-summary',
        action='store_true',
        default=False,
        help='Output summary data in csv format. Does not include individual '
        'failures or tests.')

    parser.add_argument(
        '--csv-results',
        action='store_true',
        default=False,
        help='Output test data in csv format. Does not include individual '
        'failures.')

    parser.add_argument(
        '--include-failures',
        action='store_true',
        default=False,
        help='Include individual failures in output.')

    parser.add_argument(
        '--include-tests',
        action='store_true',
        default=False,
        help='Include individual tests in output.')

    parser.set_defaults(func=summarize_isolation_pushes_jobs_json)

    args = parser.parse_args()
    # Preserve the previous behavior of args.bugs defaulting to an empty list.
    if args.bugs is None:
        args.bugs = []
    args.cache = os.path.expanduser(args.cache)

    pushes_args.compile_filters(args)
    jobs_args.compile_filters(args)

    if args.test_failure_pattern:
        args.test_failure_pattern = re.compile(args.test_failure_pattern)

    if ('test isolation' not in args.whiteboard or
        args.override_bug_summary) and not args.bugs:
        # FIX: the message previously referred to a non-existent
        # --override-test option; the option actually checked above is
        # --override-bug-summary.
        parser.error('--bug must be specified if either --whiteboard or '
                     '--override-bug-summary are specified.')

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug('main %s', args)

    if not os.path.isdir(args.cache):
        os.makedirs(args.cache)
    cache.CACHE_HOME = args.cache

    init_treeherder(args.treeherder_url)

    summary = args.func(args)

    if args.raw:
        json.dump(summary, sys.stdout)
    elif args.csv_summary:
        output_csv_summary(args, summary)
    elif args.csv_results:
        output_csv_results(args, summary)
    else:
        json.dump(summary, sys.stdout, indent=2)

    if args.dump_cache_stats:
        cache.stats()
def main():
    """Entry point: download Job Details files from Treeherder/Taskcluster."""
    parents = [
        log_level_args.get_parser(),
        pushes_args.get_parser(),
        jobs_args.get_parser(),
        treeherder_urls_args.get_parser()
    ]

    # Text contributed by the shared parent parsers.
    descriptions = [p.description for p in parents if p.description]
    epilogs = [p.epilog for p in parents if p.epilog]

    parser = argparse.ArgumentParser(
        description="""Download Job Details files from Treeherder/Taskcluster.

--download-job-details specifies a regular expression which will be matched
against the base file name of the url to the file to select the files to be
downloaded. This is not a shell glob pattern, but a full regular expression.
Files will be saved to the output directory using the path to the job detail
and a file name encoded with meta data as:

output/revision/job_guid/job_guid_run/path/platform,buildtype,job_name,job_type_symbol,filename

if --alias is specified, a soft link will be created from
output/revision to output/alias.

%s
""" % '\n\n'.join(descriptions),
        formatter_class=ArgumentFormatter,
        epilog="""
%s

You can save a set of arguments to a file and specify them later using
the @argfile syntax. The arguments contained in the file will replace
@argfile in the command line. Multiple files can be loaded into the
command line through the use of the @ syntax.

Each argument and its value must be on separate lines in the file.
""" % '\n\n'.join(epilogs),
        parents=parents,
        fromfile_prefix_chars='@')

    parser.add_argument(
        "--download-job-details",
        dest="download_job_details",
        default=None,
        required=True,
        help="""Regular expression matching Job details url basenames to be
downloaded.  Example:live_backing.log|logcat.*.log. Default None.""")

    parser.add_argument(
        '--cache',
        default='~/cia_tools_cache/',
        help='Directory used to store cached objects retrieved from Bugzilla '
        'and Treeherder.')

    parser.add_argument(
        '--update-cache',
        default=False,
        action='store_true',
        help='Recreate cached files with fresh data.')

    parser.add_argument(
        '--dump-cache-stats',
        action='store_true',
        default=False,
        help='Dump cache statistics to stderr.')

    parser.add_argument(
        "--output",
        dest="output",
        default="output",
        help="Directory where to save downloaded job details.")

    parser.add_argument(
        "--alias",
        dest="alias",
        default=None,
        help="Alias (soft link) to revision subdirectory where the "
        "downloaded job details were saved.")

    parser.set_defaults(func=download_treeherder_job_details)

    args = parser.parse_args()

    # Expand ~ and make sure the cache directory exists before any
    # cached requests are made.
    args.cache = os.path.expanduser(args.cache)
    os.makedirs(args.cache, exist_ok=True)
    cache.CACHE_HOME = args.cache

    init_treeherder(args.treeherder_url)

    if args.revision_url:
        # A revision url ends with .../<repo>/rev/<revision>.
        (args.repo, _, args.revision) = args.revision_url.split('/')[-3:]

    pushes_args.compile_filters(args)
    jobs_args.compile_filters(args)

    logging.basicConfig(level=getattr(logging, args.log_level))
    logger = logging.getLogger()
    logger.debug("main %s", args)

    args.func(args)

    if args.dump_cache_stats:
        cache.stats()