def main():
  """Parses arguments and runs the auto roll.

  Expects exactly three positional arguments: project_name, author and
  path_to_chromium.  Returns 1 on usage error, otherwise the AutoRoller's
  exit code.
  """
  usage = 'Usage: %prog project_name author path_to_chromium'

  # The default HelpFormatter causes the docstring to display improperly.
  class VanillaHelpFormatter(optparse.IndentedHelpFormatter):
    def format_description(self, description):
      if description:
        return description
      else:
        return ''

  parser = optparse.OptionParser(usage=usage,
                                 description=sys.modules[__name__].__doc__,
                                 formatter=VanillaHelpFormatter())
  parser.add_option('--no-try', action='store_true', dest='notry',
                    help='Create the CL with NOTRY=true')
  auth.add_auth_options(parser)
  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)
  if len(args) != 3:
    parser.print_usage()
    return 1
  # BUG FIX: propagate the roller's exit code. Previously the result of
  # .main() was discarded, so failures still exited with status 0.
  return AutoRoller(*args, auth_config=auth_config, options=options).main()
def __init__(self, *args, **kwargs):
  """Option parser for depot-tools-auth.

  Fixes prog/version and wires in verbosity plus OAuth2 auth options.
  """
  optparse.OptionParser.__init__(
      self, *args, prog='depot-tools-auth', version=__version__, **kwargs)
  self.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Use 2 times for more debugging info')
  auth.add_auth_options(self, auth.make_auth_config(use_oauth2=True))
def hook(parser, args, *extra_args, **kwargs):
  """Wraps parser.parse_args so it also returns a Rietveld instance.

  Monkey-patches the parser, registers the Rietveld-related options, then
  delegates to the decorated function `fn`.
  """
  old_parse_args = parser.parse_args

  def new_parse_args(args=None, values=None):
    # Parse as usual, then require --issue and build the Rietveld object.
    options, args = old_parse_args(args, values)
    auth_config = auth.extract_auth_config_from_options(options)
    if not options.issue:
      parser.error("Require --issue")
    obj = rietveld.Rietveld(options.server, auth_config, options.user)
    return options, args, obj

  parser.parse_args = new_parse_args

  parser.add_option(
      "-u", "--user",
      metavar="U",
      default=os.environ.get("EMAIL_ADDRESS", None),
      help="Email address, default: %default")
  parser.add_option(
      "-i", "--issue",
      metavar="I",
      type="int",
      help="Rietveld issue number")
  parser.add_option(
      "-s", "--server",
      metavar="S",
      default="http://codereview.chromium.org",
      help="Rietveld server, default: %default")
  auth.add_auth_options(parser)

  # Call the original function with the modified parser.
  return fn(parser, args, *extra_args, **kwargs)
def hook(parser, args, *extra_args, **kwargs):
  """Wraps parser.parse_args so it also returns a Rietveld instance.

  Monkey-patches the parser, registers the Rietveld-related options, then
  delegates to the decorated function `fn`.
  """
  old_parse_args = parser.parse_args

  def new_parse_args(args=None, values=None):
    # Parse as usual, then require --issue and build the Rietveld object.
    options, args = old_parse_args(args, values)
    auth_config = auth.extract_auth_config_from_options(options)
    if not options.issue:
      parser.error('Require --issue')
    obj = rietveld.Rietveld(options.server, auth_config, options.user)
    return options, args, obj

  parser.parse_args = new_parse_args

  parser.add_option(
      '-u', '--user',
      metavar='U',
      default=os.environ.get('EMAIL_ADDRESS', None),
      help='Email address, default: %default')
  parser.add_option(
      '-i', '--issue',
      metavar='I',
      type='int',
      help='Rietveld issue number')
  parser.add_option(
      '-s', '--server',
      metavar='S',
      default='http://codereview.chromium.org',
      help='Rietveld server, default: %default')
  auth.add_auth_options(parser)

  # Call the original function with the modified parser.
  return fn(parser, args, *extra_args, **kwargs)
def main(args):
  """Maps or runs a .isolated tree, backed by a local disk cache.

  Exactly one of --isolated or --hash must be given. Returns the test's
  exit code, or 1 on any internal error (which is also logged).
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage="%prog <options>",
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, "Data source")
  data_group.add_option(
      "-s", "--isolated",
      metavar="FILE",
      help="File/url describing what to map or run")
  data_group.add_option(
      "-H", "--hash",
      help="Hash of the .isolated to grab from the hash table")
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, "Cache management")
  cache_group.add_option(
      "--cache",
      default="cache",
      metavar="DIR",
      help="Cache directory, default=%default")
  cache_group.add_option(
      "--max-cache-size",
      type="int",
      metavar="NNN",
      default=20 * 1024 * 1024 * 1024,
      help="Trim if the cache gets larger than this value, default=%default")
  cache_group.add_option(
      "--min-free-space",
      type="int",
      metavar="NNN",
      default=2 * 1024 * 1024 * 1024,
      help="Trim if disk free space becomes lower than this value, "
           "default=%default")
  cache_group.add_option(
      "--max-items",
      type="int",
      metavar="NNN",
      default=100000,
      help="Trim if more than this number of items are in the cache "
           "default=%default")
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug("One and only one of --isolated or --hash is required.")
    parser.error("One and only one of --isolated or --hash is required.")

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies, isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
def CMDarchive(parser, args):
  """Creates a .isolated file and uploads the tree to an isolate server.

  All the files listed in the .isolated file are put in the isolate server
  cache via isolateserver.py.
  """
  add_isolate_options(parser)
  add_subdir_option(parser)
  isolateserver.add_isolate_server_options(parser)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if args:
    parser.error('Unsupported argument: %s' % args)
  process_isolate_options(parser, options)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  # Archive the current working directory as a single (options, cwd) pair.
  result = isolate_and_archive(
      [(options, unicode(os.getcwd()))],
      options.isolate_server,
      options.namespace)
  if result is None:
    return EXIT_CODE_UPLOAD_ERROR
  assert len(result) == 1, result
  if result.values()[0] is None:
    return EXIT_CODE_ISOLATE_ERROR
  return 0
def __init__(self, **kwargs):
  """Option parser pre-populated with the Swarming 'Server' option group."""
  tools.OptionParserWithLogging.__init__(
      self, prog='swarming.py', **kwargs)
  self.server_group = tools.optparse.OptionGroup(self, 'Server')
  self.server_group.add_option(
      '-S', '--swarming',
      metavar='URL',
      default=os.environ.get('SWARMING_SERVER', ''),
      help='Swarming server to use')
  self.add_option_group(self.server_group)
  auth.add_auth_options(self)
def _get_arg_parser():
  """Builds and returns the option parser for apply_issue."""
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Prints debugging infos')
  parser.add_option(
      '-e', '--email',
      help='Email address to access rietveld. If not specified, anonymous '
           'access will be used.')
  parser.add_option(
      '-E', '--email-file',
      help='File containing the email address to access rietveld. '
           'If not specified, anonymous access will be used.')
  parser.add_option(
      '-k', '--private-key-file',
      help='Path to file containing a private key in p12 format for OAuth2 '
           'authentication with "notasecret" password (as generated by Google '
           'Cloud Console).')
  parser.add_option(
      '-i', '--issue', type='int', help='Rietveld issue number')
  parser.add_option(
      '-p', '--patchset', type='int',
      help='Rietveld issue\'s patchset number')
  parser.add_option(
      '-r', '--root_dir', default=os.getcwd(),
      help='Root directory to apply the patch')
  parser.add_option(
      '-s', '--server', default='http://codereview.chromium.org',
      help='Rietveld server')
  parser.add_option(
      '--no-auth', action='store_true',
      help='Do not attempt authenticated requests.')
  parser.add_option(
      '--revision-mapping', default='{}',
      help='When running gclient, annotate the got_revisions '
           'using the revision-mapping.')
  parser.add_option(
      '-f', '--force', action='store_true',
      help='Really run apply_issue, even if .update.flag '
           'is detected.')
  parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.')
  parser.add_option(
      '--whitelist', action='append', default=[],
      help='Patch only specified file(s).')
  parser.add_option(
      '--blacklist', action='append', default=[],
      help='Don\'t patch specified file(s).')
  parser.add_option(
      '-d', '--ignore_deps', action='store_true',
      help='Don\'t run gclient sync on DEPS changes.')
  parser.add_option(
      '--extra_patchlevel', type='int',
      help='Number of directories the patch level number should '
           'be incremented (useful for patches from repos with '
           'different directory hierarchies).')
  auth.add_auth_options(parser)
  return parser
def main(args):
  """Runs the isolated test identified by --isolated from the isolate server.

  Returns the test's exit code; with --json set, non-zero only signals an
  internal failure.
  """
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='int', help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='int',
      help='Grace period between SIGTERM and SIGKILL')

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
           '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  # Normalize paths to absolute unicode before use.
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
def __init__(self, **kwargs):
  """Option parser pre-populated with the Swarming 'Server' option group."""
  tools.OptionParserWithLogging.__init__(self, prog="swarming.py", **kwargs)
  self.server_group = tools.optparse.OptionGroup(self, "Server")
  self.server_group.add_option(
      "-S", "--swarming",
      metavar="URL",
      default=os.environ.get("SWARMING_SERVER", ""),
      help="Swarming server to use")
  self.add_option_group(self.server_group)
  auth.add_auth_options(self)
def main():
  """Cherry-picks a single commit onto an upstream branch.

  Usage: %prog --branch <branch> <commit>. Returns 0 on success; exits via
  parser.error() on bad usage.
  """
  parser = optparse.OptionParser(
      usage='usage: %prog --branch <branch> <commit>')
  parser.add_option(
      '--branch',
      '-b',
      help='The upstream branch to cherry pick to.',
      metavar='<branch>')
  auth.add_auth_options(parser)
  options, args = parser.parse_args()
  # BUG FIX: the function was previously assigned, not called
  # (`auth.extract_auth_config_from_options` without `(options)`), so the
  # function object itself was passed to cherry_pick() as the auth config.
  auth_config = auth.extract_auth_config_from_options(options)

  if not options.branch:
    parser.error('--branch is required')
  if len(args) != 1:
    parser.error('Expecting single argument <commit>')

  cherry_pick(options.branch, args[0], auth_config)
  return 0
def main(args):
  """Runs the isolated test identified by --isolated from the isolate server.

  Returns the test's exit code.
  """
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  # -H is a hidden legacy alias for --isolated.
  data_group.add_option(
      '-H', dest='isolated', help=optparse.SUPPRESS_HELP)
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir',
      action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
           '[default: %default]')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error('--isolated is required.')
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, args)
def main(args):
  """Runs the isolated test identified by --isolated from the isolate server.

  Returns the test's exit code; with --json set, non-zero only signals an
  internal failure.
  """
  tools.disable_buffering()
  parser = logging_utils.OptionParserWithLogging(
      usage="%prog <options>",
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      "--json",
      help="dump output metadata to json file. When used, run_isolated returns "
           "non-zero only on internal failure")

  data_group = optparse.OptionGroup(parser, "Data source")
  data_group.add_option(
      "-s", "--isolated",
      help="Hash of the .isolated to grab from the isolate server")
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache="cache")

  debug_group = optparse.OptionGroup(parser, "Debugging")
  debug_group.add_option(
      "--leak-temp-dir",
      action="store_true",
      help="Deliberately leak isolate's temp dir for later examination "
           "[default: %default]")
  debug_group.add_option(
      "--root-dir", help="Use a directory instead of a random one")
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  if not options.isolated:
    parser.error("--isolated is required.")
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  cache = isolateserver.process_cache_options(options)
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, args)
def __init__(self, **kwargs):
  """Option parser for task triggering: server, isolate, dimensions, etc."""
  tools.OptionParserWithLogging.__init__(self, **kwargs)
  self.server_group = tools.optparse.OptionGroup(self, 'Server')
  self.server_group.add_option(
      '-S', '--swarming',
      metavar='URL',
      default=os.environ.get('SWARMING_SERVER', ''),
      help='Swarming server to use')
  isolateserver.add_isolate_server_options(self.server_group)
  self.add_option_group(self.server_group)
  auth.add_auth_options(self)
  self.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  self.add_option(
      '--priority', type='int',
      help='The lower value, the more important the task is. It may be '
           'important to specify a higher priority since the default value '
           'will make the task to be triggered only when the bots are idle.')
  self.add_option(
      '--deadline', type='int', default=6*60*60,
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
def main(args):
  """Maps or runs a .isolated tree, backed by a local disk cache.

  Exactly one of --isolated or --hash must be given. Returns the test's
  exit code, or 1 on any internal error (which is also logged).
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)
  try:
    # |options.cache| path may not exist until DiskCache() instance is created.
    cache = DiskCache(
        options.cache, policies, isolateserver.get_hash_algo(options.namespace))
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      # Hashing schemes used by |storage| and |cache| MUST match.
      assert storage.hash_algo == cache.hash_algo
      return run_tha_test(
          options.isolated or options.hash, storage, cache, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
def _get_arg_parser():
  """Builds and returns the option parser for apply_issue."""
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Prints debugging infos')
  parser.add_option(
      '-e', '--email',
      help='Email address to access rietveld. If not specified, anonymous '
           'access will be used.')
  parser.add_option(
      '-E', '--email-file',
      help='File containing the email address to access rietveld. '
           'If not specified, anonymous access will be used.')
  parser.add_option(
      '-k', '--private-key-file',
      help='Path to file containing a private key in p12 format for OAuth2 '
           'authentication with "notasecret" password (as generated by Google '
           'Cloud Console).')
  parser.add_option(
      '-i', '--issue', type='int', help='Rietveld issue number')
  parser.add_option(
      '-p', '--patchset', type='int',
      help='Rietveld issue\'s patchset number')
  parser.add_option(
      '-r', '--root_dir', default=os.getcwd(),
      help='Root directory to apply the patch')
  parser.add_option(
      '-s', '--server', default='http://codereview.chromium.org',
      help='Rietveld server')
  parser.add_option(
      '--no-auth', action='store_true',
      help='Do not attempt authenticated requests.')
  parser.add_option(
      '--revision-mapping', default='{}',
      help='When running gclient, annotate the got_revisions '
           'using the revision-mapping.')
  parser.add_option(
      '-f', '--force', action='store_true',
      help='Really run apply_issue, even if .update.flag '
           'is detected.')
  parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.')
  parser.add_option(
      '--whitelist', action='append', default=[],
      help='Patch only specified file(s).')
  parser.add_option(
      '--blacklist', action='append', default=[],
      help='Don\'t patch specified file(s).')
  parser.add_option(
      '-d', '--ignore_deps', action='store_true',
      help='Don\'t run gclient sync on DEPS changes.')
  auth.add_auth_options(parser)
  return parser
def __init__(self, *args, **kwargs):
  """Option parser for depot-tools-auth.

  Fixes prog/version and wires in verbosity plus OAuth2 auth options.
  """
  optparse.OptionParser.__init__(
      self, *args, prog="depot-tools-auth", version=__version__, **kwargs)
  self.add_option(
      "-v", "--verbose", action="count", default=0,
      help="Use 2 times for more debugging info")
  auth.add_auth_options(self, auth.make_auth_config(use_oauth2=True))
def main(): # TODO(pgervais): This function is way too long. Split. sys.stdout = Unbuffered(sys.stdout) parser = optparse.OptionParser(description=sys.modules[__name__].__doc__) parser.add_option( '-v', '--verbose', action='count', default=0, help='Prints debugging infos') parser.add_option( '-e', '--email', help='Email address to access rietveld. If not specified, anonymous ' 'access will be used.') parser.add_option( '-E', '--email-file', help='File containing the email address to access rietveld. ' 'If not specified, anonymous access will be used.') parser.add_option( '-k', '--private-key-file', help='Path to file containing a private key in p12 format for OAuth2 ' 'authentication with "notasecret" password (as generated by Google ' 'Cloud Console).') parser.add_option( '-i', '--issue', type='int', help='Rietveld issue number') parser.add_option( '-p', '--patchset', type='int', help='Rietveld issue\'s patchset number') parser.add_option( '-r', '--root_dir', default=os.getcwd(), help='Root directory to apply the patch') parser.add_option( '-s', '--server', default='http://codereview.chromium.org', help='Rietveld server') parser.add_option('--no-auth', action='store_true', help='Do not attempt authenticated requests.') parser.add_option('--revision-mapping', default='{}', help='When running gclient, annotate the got_revisions ' 'using the revision-mapping.') parser.add_option('-f', '--force', action='store_true', help='Really run apply_issue, even if .update.flag ' 'is detected.') parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.') parser.add_option('--whitelist', action='append', default=[], help='Patch only specified file(s).') parser.add_option('--blacklist', action='append', default=[], help='Don\'t patch specified file(s).') parser.add_option('-d', '--ignore_deps', action='store_true', help='Don\'t run gclient sync on DEPS changes.') auth.add_auth_options(parser) options, args = parser.parse_args() auth_config = 
auth.extract_auth_config_from_options(options) if options.whitelist and options.blacklist: parser.error('Cannot specify both --whitelist and --blacklist') if options.email and options.email_file: parser.error('-e and -E options are incompatible') if (os.path.isfile(os.path.join(os.getcwd(), 'update.flag')) and not options.force): print 'update.flag file found: bot_update has run and checkout is already ' print 'in a consistent state. No actions will be performed in this step.' return 0 logging.basicConfig( format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s', level=[logging.WARNING, logging.INFO, logging.DEBUG][ min(2, options.verbose)]) if args: parser.error('Extra argument(s) "%s" not understood' % ' '.join(args)) if not options.issue: parser.error('Require --issue') options.server = options.server.rstrip('/') if not options.server: parser.error('Require a valid server') options.revision_mapping = json.loads(options.revision_mapping) # read email if needed if options.email_file: if not os.path.exists(options.email_file): parser.error('file does not exist: %s' % options.email_file) with open(options.email_file, 'rb') as f: options.email = f.read().strip() print('Connecting to %s' % options.server) # Always try un-authenticated first, except for OAuth2 if options.private_key_file: # OAuth2 authentication obj = rietveld.JwtOAuth2Rietveld(options.server, options.email, options.private_key_file) properties = obj.get_issue_properties(options.issue, False) else: # Passing None as auth_config disables authentication. obj = rietveld.Rietveld(options.server, None) properties = None # Bad except clauses order (HTTPError is an ancestor class of # ClientLoginError) # pylint: disable=E0701 try: properties = obj.get_issue_properties(options.issue, False) except urllib2.HTTPError as e: if e.getcode() != 302: raise if options.no_auth: exit('FAIL: Login detected -- is issue private?') # TODO(maruel): A few 'Invalid username or password.' 
are printed first, # we should get rid of those. except rietveld.upload.ClientLoginError as e: # Fine, we'll do proper authentication. pass if properties is None: obj = rietveld.Rietveld(options.server, auth_config, options.email) try: properties = obj.get_issue_properties(options.issue, False) except rietveld.upload.ClientLoginError as e: print('Accessing the issue requires proper credentials.') return 1 if not options.patchset: options.patchset = properties['patchsets'][-1] print('No patchset specified. Using patchset %d' % options.patchset) issues_patchsets_to_apply = [(options.issue, options.patchset)] depends_on_info = obj.get_depends_on_patchset(options.issue, options.patchset) while depends_on_info: depends_on_issue = int(depends_on_info['issue']) depends_on_patchset = int(depends_on_info['patchset']) try: depends_on_info = obj.get_depends_on_patchset(depends_on_issue, depends_on_patchset) issues_patchsets_to_apply.insert(0, (depends_on_issue, depends_on_patchset)) except urllib2.HTTPError: print ('The patchset that was marked as a dependency no longer ' 'exists: %s/%d/#ps%d' % ( options.server, depends_on_issue, depends_on_patchset)) print 'Therefore it is likely that this patch will not apply cleanly.' print depends_on_info = None num_issues_patchsets_to_apply = len(issues_patchsets_to_apply) if num_issues_patchsets_to_apply > 1: print print 'apply_issue.py found %d dependent CLs.' 
% ( num_issues_patchsets_to_apply - 1) print 'They will be applied in the following order:' num = 1 for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply: print ' #%d %s/%d/#ps%d' % ( num, options.server, issue_to_apply, patchset_to_apply) num += 1 print for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply: issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply, patchset_to_apply) print('Downloading patch from %s' % issue_url) try: patchset = obj.get_patch(issue_to_apply, patchset_to_apply) except urllib2.HTTPError as e: print( 'Failed to fetch the patch for issue %d, patchset %d.\n' 'Try visiting %s/%d') % ( issue_to_apply, patchset_to_apply, options.server, issue_to_apply) return 1 if options.whitelist: patchset.patches = [patch for patch in patchset.patches if patch.filename in options.whitelist] if options.blacklist: patchset.patches = [patch for patch in patchset.patches if patch.filename not in options.blacklist] for patch in patchset.patches: print(patch) full_dir = os.path.abspath(options.root_dir) scm_type = scm.determine_scm(full_dir) if scm_type == 'svn': scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None) elif scm_type == 'git': scm_obj = checkout.GitCheckout(full_dir, None, None, None, None) elif scm_type == None: scm_obj = checkout.RawCheckout(full_dir, None, None) else: parser.error('Couldn\'t determine the scm') # TODO(maruel): HACK, remove me. # When run a build slave, make sure buildbot knows that the checkout was # modified. 
if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot': # See sourcedirIsPatched() in: # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/ # chromium_commands.py?view=markup open('.buildbot-patched', 'w').close() print('\nApplying the patch from %s' % issue_url) try: scm_obj.apply_patch(patchset, verbose=True) except checkout.PatchApplicationFailed as e: print(str(e)) print('CWD=%s' % os.getcwd()) print('Checkout path=%s' % scm_obj.project_path) return 1 if ('DEPS' in map(os.path.basename, patchset.filenames) and not options.ignore_deps): gclient_root = gclient_utils.FindGclientRoot(full_dir) if gclient_root and scm_type: print( 'A DEPS file was updated inside a gclient checkout, running gclient ' 'sync.') gclient_path = os.path.join(BASE_DIR, 'gclient') if sys.platform == 'win32': gclient_path += '.bat' with annotated_gclient.temp_filename(suffix='gclient') as f: cmd = [ gclient_path, 'sync', '--nohooks', '--delete_unversioned_trees', ] if scm_type == 'svn': cmd.extend(['--revision', 'BASE']) if options.revision_mapping: cmd.extend(['--output-json', f]) retcode = subprocess.call(cmd, cwd=gclient_root) if retcode == 0 and options.revision_mapping: revisions = annotated_gclient.parse_got_revision( f, options.revision_mapping) annotated_gclient.emit_buildprops(revisions) return retcode return 0
def main():
  """Lists or counts Rietveld reviews done by a reviewer in a date range."""
  # Silence upload.py.
  rietveld.upload.verbosity = 0
  today = datetime.date.today()
  begin, end = get_previous_quarter(today)

  # Derive a default reviewer email from the environment when possible.
  default_email = os.environ.get('EMAIL_ADDRESS')
  if not default_email:
    user = os.environ.get('USER')
    if user:
      default_email = user + '@chromium.org'

  parser = optparse.OptionParser(description=__doc__)
  parser.add_option(
      '--count', action='store_true',
      help='Just count instead of printing individual issues')
  parser.add_option(
      '-r', '--reviewer', metavar='<email>', default=default_email,
      help='Filter on issue reviewer, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date')
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, e.g. %s to %s' % (begin, end))
  parser.add_option(
      '-i', '--instance_url', metavar='<host>',
      default='http://codereview.chromium.org',
      help='Host to use, default is %default')
  auth.add_auth_options(parser)
  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member
  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)
  if args:
    parser.error('Args unsupported')
  if options.reviewer is None:
    parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')
  print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer

  if options.last_quarter:
    options.begin = begin
    options.end = end
    print >> sys.stderr, 'Using range %s to %s' % (
        options.begin, options.end)
  else:
    if options.begin is None or options.end is None:
      parser.error('Please specify either --last_quarter or --begin and --end')

  # Validate dates.
  try:
    options.begin = dateutil.parser.parse(options.begin).strftime('%Y-%m-%d')
    options.end = dateutil.parser.parse(options.end).strftime('%Y-%m-%d')
  except ValueError as e:
    parser.error('%s: %s - %s' % (e, options.begin, options.end))

  if options.count:
    print_count(
        options.reviewer,
        options.begin,
        options.end,
        options.instance_url,
        auth_config)
  else:
    print_reviews(
        options.reviewer,
        options.begin,
        options.end,
        options.instance_url,
        auth_config)
  return 0
def main():
  """CLI entry point: reports a developer's activity (changes, reviews,
  issues) over a date range, in plain text, markdown, or JSON.

  Returns 0; exits via parser.error() on bad usage.
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>',
      default=os.environ.get('USER'),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date (mm/dd/yy)')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date (mm/dd/yy)')
  # "Last quarter" is computed relative to two months ago so that early in a
  # quarter the previous one is selected.
  quarter_begin, quarter_end = get_quarter_of(
      datetime.today() - relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
          quarter_begin.strftime('%Y-%m-%d'),
          quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date (mm/dd/yy)')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth', action='store_true',
      help='Ask to authenticate for instances with no auth cookie')
  parser.add_option(
      '-d', '--deltas', action='store_true',
      help='Fetch deltas for changes.')
  parser.add_option(
      '--no-referenced-issues', action='store_true',
      help='Do not fetch issues referenced by owned changes. Useful in '
           'combination with --changes-by-issue when you only want to list '
           'issues that have also been modified in the same time period.')
  parser.add_option(
      '--skip-own-issues-without-changes', action='store_true',
      help='Skips listing own issues without changes when showing changes '
           'grouped by referenced issue(s). See --changes-by-issue for more '
           'details.')

  activity_types_group = optparse.OptionGroup(parser, 'Activity Types',
      'By default, all activity will be looked up and '
      'printed. If any of these are specified, only '
      'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes', action='store_true', help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues', action='store_true', help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews', action='store_true', help='Show reviews.')
  activity_types_group.add_option(
      '--changes-by-issue', action='store_true',
      help='Show changes grouped by referenced issue(s).')
  parser.add_option_group(activity_types_group)

  output_format_group = optparse.OptionGroup(parser, 'Output Format',
      'By default, all activity will be printed in the '
      'following format: {url} {title}. This can be '
      'changed for either all activity types or '
      'individually for each activity type. The format '
      'is defined as documented for '
      'string.format(...). The variables available for '
      'all activity types are url, title and author. '
      'Format options for specific activity types will '
      'override the generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>',
      default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>', default=None,
      help='Specifies the format to use when printing changes. Supports the '
           'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>', default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>', default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>',
      default=u'{heading}:',
      help='Specifies the format to use when printing headings.')
  output_format_group.add_option(
      '--output-format-no-url', default='{title}',
      help='Specifies the format to use when printing activity without url.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  output_format_group.add_option(
      '-j', '--json', action='store_true',
      help='Output json data (overrides other format options)')
  parser.add_option_group(output_format_group)
  auth.add_auth_options(parser)

  parser.add_option(
      '-v', '--verbose', action='store_const', dest='verbosity',
      default=logging.WARN, const=logging.INFO,
      help='Output extra informational messages.')
  parser.add_option(
      '-q', '--quiet', action='store_const', dest='verbosity',
      const=logging.ERROR,
      help='Suppress non-error messages.')
  parser.add_option(
      '-M', '--merged-only', action='store_true', dest='merged_only',
      default=False,
      help='Shows only changes that have been merged.')
  parser.add_option(
      '-C', '--completed-issues', action='store_true', dest='completed_issues',
      default=False,
      help='Shows only monorail issues that have completed (Fixed|Verified) '
           'by the user.')
  parser.add_option(
      '-o', '--output', metavar='<file>',
      help='Where to output the results. By default prints to stdout.')

  # Remove description formatting so the module docstring prints verbatim.
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member

  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER is not set, please use -u')
  options.user = username(options.user)

  logging.basicConfig(level=options.verbosity)

  # python-keyring provides easy access to the system keyring.
  try:
    import keyring  # pylint: disable=unused-import,unused-variable,F0401
  except ImportError:
    logging.warning('Consider installing python-keyring')

  # Resolve the date range: explicit --begin/--end win, otherwise one of the
  # convenience flags, defaulting to "the week of yesterday".
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (get_week_of(datetime.today() -
                                timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = dateutil.parser.parse(options.begin)
    if options.end:
      end = dateutil.parser.parse(options.end)
    else:
      end = datetime.today()
  options.begin, options.end = begin, end

  if options.markdown:
    options.output_format_heading = '### {heading}\n'
    options.output_format = ' * [{title}]({url})'
    options.output_format_no_url = ' * {title}'
  logging.info('Searching for activity by %s', options.user)
  logging.info('Using range %s to %s', options.begin, options.end)

  my_activity = MyActivity(options)
  my_activity.show_progress('Loading data')

  # No activity-type flags means "show everything" (except changes-by-issue).
  if not (options.changes or options.reviews or options.issues or
          options.changes_by_issue):
    options.changes = True
    options.issues = True
    options.reviews = True

  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes or options.changes_by_issue:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()

  logging.info('Looking up activity.....')
  try:
    if options.changes or options.changes_by_issue:
      my_activity.get_changes()
    if options.reviews:
      my_activity.get_reviews()
    if options.issues or options.changes_by_issue:
      my_activity.get_issues()
    if not options.no_referenced_issues:
      my_activity.get_referenced_issues()
  except auth.AuthenticationError as e:
    # Partial data may still be printed below; the error is logged, not fatal.
    logging.error('auth.AuthenticationError: %s', e)

  my_activity.show_progress('\n')

  my_activity.print_access_errors()

  # If -o was given, temporarily redirect stdout to the file; the finally
  # clause restores sys.stdout no matter what happened while printing.
  output_file = None
  try:
    if options.output:
      output_file = open(options.output, 'w')
      logging.info('Printing output to "%s"', options.output)
      sys.stdout = output_file
  except (IOError, OSError) as e:
    logging.error('Unable to write output: %s', e)
  else:
    if options.json:
      my_activity.dump_json()
    else:
      if options.changes:
        my_activity.print_changes()
      if options.reviews:
        my_activity.print_reviews()
      if options.issues:
        my_activity.print_issues()
      if options.changes_by_issue:
        my_activity.print_changes_by_issue(
            options.skip_own_issues_without_changes)
  finally:
    if output_file:
      logging.info('Done printing to file.')
      sys.stdout = sys.__stdout__
      output_file.close()

  return 0
def create_option_parser():
  """Builds and returns the run_isolated option parser.

  Declarative only: no parsing or validation happens here. Options are grouped
  into the main parser plus 'Data source', 'Named caches' and 'Debugging'
  groups; cache/cipd/auth options come from their respective modules.
  """
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options> [command to run or extra args]',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--clean', action='store_true',
      help='Cleans the cache, trimming it necessary and remove corrupted items '
           'and returns without executing anything; use with -v to know what '
           'was done')
  parser.add_option(
      '--no-clean', action='store_true',
      help='Do not clean the cache automatically on startup. This is meant for '
           'bots where a separate execution with --clean was done earlier so '
           'doing it again is redundant')
  parser.add_option(
      '--use-symlinks', action='store_true',
      help='Use symlinks instead of hardlinks')
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='float',
      help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='float',
      help='Grace period between SIGTERM and SIGKILL')
  parser.add_option(
      '--raw-cmd', action='store_true',
      help='Ignore the isolated command, use the one supplied at the command '
           'line')
  parser.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  parser.add_option(
      '--env', default=[], action='append',
      help='Environment variables to set for the child process')
  parser.add_option(
      '--env-prefix', default=[], action='append',
      help='Specify a VAR=./path/fragment to put in the environment variable '
           'before executing the command. The path fragment must be relative '
           'to the isolated run directory, and must not contain a `..` token. '
           'The path will be made absolute and prepended to the indicated '
           '$VAR using the OS\'s path separator. Multiple items for the same '
           '$VAR will be prepended in order.')
  parser.add_option(
      '--bot-file',
      help='Path to a file describing the state of the host. The content is '
           'defined by on_before_task() in bot_config.')
  parser.add_option(
      '--switch-to-account',
      help='If given, switches LUCI_CONTEXT to given logical service account '
           '(e.g. "task" or "system") before launching the isolated process.')
  parser.add_option(
      '--output', action='append',
      help='Specifies an output to return. If no outputs are specified, all '
           'files located in $(ISOLATED_OUTDIR) will be returned; '
           'otherwise, outputs in both $(ISOLATED_OUTDIR) and those '
           'specified by --output option (there can be multiple) will be '
           'returned. Note that if a file in OUT_DIR has the same path '
           'as an --output option, the --output version will be returned.')
  parser.add_option(
      '-a', '--argsfile',
      # This is actually handled in parse_args; it's included here purely so it
      # can make it into the help text.
      help='Specify a file containing a JSON array of arguments to this '
           'script. If --argsfile is provided, no other argument may be '
           'provided on the command line.')

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server.')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)

  cipd.add_cipd_options(parser)

  group = optparse.OptionGroup(parser, 'Named caches')
  group.add_option(
      '--named-cache',
      dest='named_caches',
      action='append',
      # nargs=2: each --named-cache consumes a (name, path) pair.
      nargs=2,
      default=[],
      help='A named cache to request. Accepts two arguments, name and path. '
           'name identifies the cache, must match regex [a-z0-9_]{1,4096}. '
           'path is a path relative to the run dir where the cache directory '
           'must be put to. '
           'This option can be specified more than once.')
  group.add_option(
      '--named-cache-root', default='named_caches',
      help='Cache root directory. Default=%default')
  parser.add_option_group(group)

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir', action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination. '
           'Default: %default')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)

  parser.set_defaults(cache='cache', cipd_cache='cipd_cache')
  return parser
def main():
  """CLI entry point (Python 2 only: uses print statements): reports a
  developer's changes, reviews and issues over a date range to stdout.

  Returns 0; exits via parser.error() on bad usage.
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>',
      default=os.environ.get('USER'),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date')
  # "Last quarter" is computed relative to two months ago so that early in a
  # quarter the previous one is selected.
  quarter_begin, quarter_end = get_quarter_of(
      datetime.today() - relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
          quarter_begin.strftime('%Y-%m-%d'),
          quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth', action='store_true',
      help='Ask to authenticate for instances with no auth cookie')

  activity_types_group = optparse.OptionGroup(parser, 'Activity Types',
      'By default, all activity will be looked up and '
      'printed. If any of these are specified, only '
      'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes', action='store_true', help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues', action='store_true', help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews', action='store_true', help='Show reviews.')
  parser.add_option_group(activity_types_group)

  output_format_group = optparse.OptionGroup(parser, 'Output Format',
      'By default, all activity will be printed in the '
      'following format: {url} {title}. This can be '
      'changed for either all activity types or '
      'individually for each activity type. The format '
      'is defined as documented for '
      'string.format(...). The variables available for '
      'all activity types are url, title and author. '
      'Format options for specific activity types will '
      'override the generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>',
      default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>', default=None,
      help='Specifies the format to use when printing changes. Supports the '
           'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>', default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>', default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>',
      default=u'{heading}:',
      help='Specifies the format to use when printing headings.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  parser.add_option_group(output_format_group)
  auth.add_auth_options(parser)

  # Remove description formatting so the module docstring prints verbatim.
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=E1101

  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER is not set, please use -u')
  options.user = username(options.user)

  # Resolve the date range: explicit --begin/--end (mm/dd/yy) win, otherwise
  # one of the convenience flags, defaulting to "the week of yesterday".
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (get_week_of(datetime.today() -
                                timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = datetime.strptime(options.begin, '%m/%d/%y')
    if options.end:
      end = datetime.strptime(options.end, '%m/%d/%y')
    else:
      end = datetime.today()
  options.begin, options.end = begin, end

  if options.markdown:
    options.output_format = ' * [{title}]({url})'
    options.output_format_heading = '### {heading} ###'
  print 'Searching for activity by %s' % options.user
  print 'Using range %s to %s' % (options.begin, options.end)

  my_activity = MyActivity(options)

  # No activity-type flags means "show everything".
  if not (options.changes or options.reviews or options.issues):
    options.changes = True
    options.issues = True
    options.reviews = True

  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()
  if options.issues:
    my_activity.auth_for_issues()

  print 'Looking up activity.....'
  if options.changes:
    my_activity.get_changes()
  if options.reviews:
    my_activity.get_reviews()
  if options.issues:
    my_activity.get_issues()

  print '\n\n\n'

  my_activity.print_changes()
  my_activity.print_reviews()
  my_activity.print_issues()
  return 0
def main(args):
  """Entry point for run_isolated: parses options, optionally cleans the
  cache, then downloads and runs the requested isolated tree.

  Args:
    args: command-line arguments (without the program name); leftover
        positional args are forwarded to the executed command.

  Returns an exit code: 0 after --clean, otherwise whatever run_tha_test
  returns.
  """
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--clean', action='store_true',
      help='Cleans the cache, trimming it necessary and remove corrupted items '
           'and returns without executing anything; use with -v to know what '
           'was done')
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='float',
      help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='float',
      help='Grace period between SIGTERM and SIGKILL')
  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)
  parser.set_defaults(cache='cache')

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir', action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination '
           '[default: %default]')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)

  cache = isolateserver.process_cache_options(options)
  if options.clean:
    # --clean is exclusive: reject every option that implies running a task.
    if options.isolated:
      parser.error('Can\'t use --isolated with --clean.')
    if options.isolate_server:
      parser.error('Can\'t use --isolate-server with --clean.')
    if options.json:
      parser.error('Can\'t use --json with --clean.')
    cache.cleanup()
    return 0

  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)
  # Normalize user-supplied paths to absolute unicode paths (Python 2).
  if options.root_dir:
    options.root_dir = unicode(os.path.abspath(options.root_dir))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
  if not options.isolated:
    parser.error('--isolated is required.')
  with isolateserver.get_storage(
      options.isolate_server, options.namespace) as storage:
    # Hashing schemes used by |storage| and |cache| MUST match.
    assert storage.hash_algo == cache.hash_algo
    return run_tha_test(
        options.isolated, storage, cache, options.leak_temp_dir, options.json,
        options.root_dir, options.hard_timeout, options.grace_period, args)
def main():
  """CLI entry point: reports a developer's changes, reviews and issues over
  a date range; supports plain text, markdown and JSON output, optionally to
  a file via -o.

  Returns 0; exits via parser.error() on bad usage.
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>', default=os.environ.get('USER'),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date (mm/dd/yy)')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date (mm/dd/yy)')
  # "Last quarter" is computed relative to two months ago so that early in a
  # quarter the previous one is selected.
  quarter_begin, quarter_end = get_quarter_of(
      datetime.today() - relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
          quarter_begin.strftime('%Y-%m-%d'),
          quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date (mm/dd/yy)')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth', action='store_true',
      help='Ask to authenticate for instances with no auth cookie')
  parser.add_option(
      '-d', '--deltas', action='store_true',
      help='Fetch deltas for changes (slow).')

  activity_types_group = optparse.OptionGroup(
      parser, 'Activity Types',
      'By default, all activity will be looked up and '
      'printed. If any of these are specified, only '
      'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes', action='store_true', help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues', action='store_true', help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews', action='store_true', help='Show reviews.')
  parser.add_option_group(activity_types_group)

  output_format_group = optparse.OptionGroup(
      parser, 'Output Format',
      'By default, all activity will be printed in the '
      'following format: {url} {title}. This can be '
      'changed for either all activity types or '
      'individually for each activity type. The format '
      'is defined as documented for '
      'string.format(...). The variables available for '
      'all activity types are url, title and author. '
      'Format options for specific activity types will '
      'override the generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>', default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>', default=None,
      help='Specifies the format to use when printing changes. Supports the '
           'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>', default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>', default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>', default=u'{heading}:',
      help='Specifies the format to use when printing headings.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  output_format_group.add_option(
      '-j', '--json', action='store_true',
      help='Output json data (overrides other format options)')
  parser.add_option_group(output_format_group)
  auth.add_auth_options(parser)
  parser.add_option(
      '-v', '--verbose', action='store_const', dest='verbosity',
      default=logging.WARN, const=logging.INFO,
      help='Output extra informational messages.')
  parser.add_option(
      '-q', '--quiet', action='store_const', dest='verbosity',
      const=logging.ERROR,
      help='Suppress non-error messages.')
  parser.add_option(
      '-o', '--output', metavar='<file>',
      help='Where to output the results. By default prints to stdout.')

  # Remove description formatting so the module docstring prints verbatim.
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member

  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER is not set, please use -u')
  options.user = username(options.user)

  logging.basicConfig(level=options.verbosity)

  # python-keyring provides easy access to the system keyring.
  try:
    import keyring  # pylint: disable=unused-import,unused-variable,F0401
  except ImportError:
    logging.warning('Consider installing python-keyring')

  # Resolve the date range: explicit --begin/--end win, otherwise one of the
  # convenience flags, defaulting to "the week of yesterday".
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(
          datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (
          get_week_of(datetime.today() -
                      timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = dateutil.parser.parse(options.begin)
    if options.end:
      end = dateutil.parser.parse(options.end)
    else:
      end = datetime.today()
  options.begin, options.end = begin, end

  if options.markdown:
    options.output_format = ' * [{title}]({url})'
    options.output_format_heading = '### {heading} ###'
  logging.info('Searching for activity by %s', options.user)
  logging.info('Using range %s to %s', options.begin, options.end)

  my_activity = MyActivity(options)

  # No activity-type flags means "show everything".
  if not (options.changes or options.reviews or options.issues):
    options.changes = True
    options.issues = True
    options.reviews = True

  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()

  logging.info('Looking up activity.....')
  try:
    if options.changes:
      my_activity.get_changes()
    if options.reviews:
      my_activity.get_reviews()
    if options.issues:
      my_activity.get_issues()
  except auth.AuthenticationError as e:
    # Partial data may still be printed below; the error is logged, not fatal.
    logging.error('auth.AuthenticationError: %s', e)

  # If -o was given, temporarily redirect stdout to the file; the finally
  # clause restores sys.stdout no matter what happened while printing.
  output_file = None
  try:
    if options.output:
      output_file = open(options.output, 'w')
      logging.info('Printing output to "%s"', options.output)
      sys.stdout = output_file
  except (IOError, OSError) as e:
    logging.error('Unable to write output: %s', e)
  else:
    if options.json:
      my_activity.dump_json()
    else:
      my_activity.print_changes()
      my_activity.print_reviews()
      my_activity.print_issues()
  finally:
    if output_file:
      logging.info('Done printing to file.')
      sys.stdout = sys.__stdout__
      output_file.close()

  return 0
def create_option_parser():
  """Builds and returns the run_isolated option parser (variant that installs
  named-cache options via the named_cache module).

  Declarative only: no parsing or validation happens here.
  """
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options> [command to run or extra args]',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--clean', action='store_true',
      help='Cleans the cache, trimming it necessary and remove corrupted items '
           'and returns without executing anything; use with -v to know what '
           'was done')
  parser.add_option(
      '--no-clean', action='store_true',
      help='Do not clean the cache automatically on startup. This is meant for '
           'bots where a separate execution with --clean was done earlier so '
           'doing it again is redundant')
  parser.add_option(
      '--use-symlinks', action='store_true',
      help='Use symlinks instead of hardlinks')
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='float',
      help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='float',
      help='Grace period between SIGTERM and SIGKILL')
  parser.add_option(
      '--bot-file',
      help='Path to a file describing the state of the host. The content is '
           'defined by on_before_task() in bot_config.')
  parser.add_option(
      '--switch-to-account',
      help='If given, switches LUCI_CONTEXT to given logical service account '
           '(e.g. "task" or "system") before launching the isolated process.')
  parser.add_option(
      '--output', action='append',
      help='Specifies an output to return. If no outputs are specified, all '
           'files located in $(ISOLATED_OUTDIR) will be returned; '
           'otherwise, outputs in both $(ISOLATED_OUTDIR) and those '
           'specified by --output option (there can be multiple) will be '
           'returned. Note that if a file in OUT_DIR has the same path '
           'as an --output option, the --output version will be returned.')
  parser.add_option(
      '-a', '--argsfile',
      # This is actually handled in parse_args; it's included here purely so it
      # can make it into the help text.
      help='Specify a file containing a JSON array of arguments to this '
           'script. If --argsfile is provided, no other argument may be '
           'provided on the command line.')

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server.')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)

  cipd.add_cipd_options(parser)
  named_cache.add_named_cache_options(parser)

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir', action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination. '
           'Default: %default')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)

  parser.set_defaults(
      cache='cache',
      cipd_cache='cipd_cache',
      named_cache_root='named_caches')
  return parser
def create_option_parser():
  """Builds and returns the run_isolated option parser (minimal variant:
  no named caches, no --output/--argsfile/--switch-to-account).

  Declarative only: no parsing or validation happens here.
  """
  parser = logging_utils.OptionParserWithLogging(
      usage='%prog <options> [command to run or extra args]',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)
  parser.add_option(
      '--clean', action='store_true',
      help='Cleans the cache, trimming it necessary and remove corrupted items '
           'and returns without executing anything; use with -v to know what '
           'was done')
  parser.add_option(
      '--no-clean', action='store_true',
      help='Do not clean the cache automatically on startup. This is meant for '
           'bots where a separate execution with --clean was done earlier so '
           'doing it again is redundant')
  parser.add_option(
      '--use-symlinks', action='store_true',
      help='Use symlinks instead of hardlinks')
  parser.add_option(
      '--json',
      help='dump output metadata to json file. When used, run_isolated returns '
           'non-zero only on internal failure')
  parser.add_option(
      '--hard-timeout', type='float',
      help='Enforce hard timeout in execution')
  parser.add_option(
      '--grace-period', type='float',
      help='Grace period between SIGTERM and SIGKILL')
  parser.add_option(
      '--bot-file',
      help='Path to a file describing the state of the host. The content is '
           'defined by on_before_task() in bot_config.')

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      help='Hash of the .isolated to grab from the isolate server.')
  isolateserver.add_isolate_server_options(data_group)
  parser.add_option_group(data_group)

  isolateserver.add_cache_options(parser)

  cipd.add_cipd_options(parser)

  debug_group = optparse.OptionGroup(parser, 'Debugging')
  debug_group.add_option(
      '--leak-temp-dir', action='store_true',
      help='Deliberately leak isolate\'s temp dir for later examination. '
           'Default: %default')
  debug_group.add_option(
      '--root-dir', help='Use a directory instead of a random one')
  parser.add_option_group(debug_group)

  auth.add_auth_options(parser)

  parser.set_defaults(cache='cache', cipd_cache='cipd_cache')
  return parser
def main(args):
  """Runs cpplint on the files touched by the current changelist.

  Adapted from git_cl.py CMDlint.

  Args:
    args: extra command-line arguments forwarded to cpplint.

  Returns:
    1 if cpplint is missing or reported any errors, 0 otherwise.
  """
  # NOTE: the original had a second, dead triple-quoted string statement after
  # the docstring; its content is merged into the docstring above.
  parser = git_cl.OptionParser()
  parser.add_option(
      '--filter', action='append', metavar='-x,+y',
      help='Comma-separated list of cpplint\'s category-filters')
  parser.add_option('--project_root')
  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth_config = auth.extract_auth_config_from_options(options)

  # Access to a protected member _XX of a client class
  # pylint: disable=protected-access
  try:
    import cpplint
    import cpplint_chromium
  except ImportError:
    print(
        'Your depot_tools is missing cpplint.py and/or cpplint_chromium.py.'
    )
    return 1

  # Change the current working directory before calling lint so that it
  # shows the correct base.
  settings = git_cl.settings
  previous_cwd = os.getcwd()
  os.chdir(settings.GetRoot())
  try:
    cl = git_cl.Changelist(auth_config=auth_config)
    change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
    files = [f.LocalPath() for f in change.AffectedFiles()]
    if not files:
      print('Cannot lint an empty CL')
      return 0

    # Process cpplint arguments, if any.
    command = args + files
    if options.filter:
      command = ['--filter=' + ','.join(options.filter)] + command
    if options.project_root:
      command = ['--project_root=' + options.project_root] + command
    filenames = cpplint.ParseArguments(command)

    # Only lint files matching the project's lint regex, minus those matching
    # the ignore regex.
    white_regex = re.compile(settings.GetLintRegex())
    black_regex = re.compile(settings.GetLintIgnoreRegex())
    extra_check_functions = [
        cpplint_chromium.CheckPointerDeclarationWhitespace
    ]
    for filename in filenames:
      if white_regex.match(filename):
        if black_regex.match(filename):
          print('Ignoring file %s' % filename)
        else:
          cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level,
                              extra_check_functions)
      else:
        print('Skipping file %s' % filename)
  finally:
    # Always restore the caller's working directory, even if linting raised.
    os.chdir(previous_cwd)
  print('Total errors found: %d\n' % cpplint._cpplint_state.error_count)
  if cpplint._cpplint_state.error_count != 0:
    return 1
  return 0
def CMDbatcharchive(parser, args):
  """Archives multiple isolated trees at once.

  Using single command instead of multiple sequential invocations allows to cut
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and so
  on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees
  to isolate. Format of files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
  isolateserver.add_isolate_server_options(parser)
  isolateserver.add_archive_options(parser)
  auth.add_auth_options(parser)
  parser.add_option(
      "--dump-json",
      metavar="FILE",
      help="Write isolated hashes of archived trees to this file as JSON")
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  # Validate all incoming options, prepare what needs to be archived as a list
  # of tuples (archival options, working directory).
  work_units = []
  for gen_json_path in args:
    # Validate JSON format of a *.isolated.gen.json file.
    data = tools.read_json(gen_json_path)
    if data.get("version") != ISOLATED_GEN_JSON_VERSION:
      parser.error("Invalid version in %s" % gen_json_path)
    cwd = data.get("dir")
    if not isinstance(cwd, unicode) or not os.path.isdir(cwd):
      parser.error("Invalid dir in %s" % gen_json_path)
    # Fix: named 'cmd_args' instead of 'args' so the list being iterated by
    # the enclosing for-loop is not rebound mid-iteration (shadowing hazard).
    cmd_args = data.get("args")
    if (not isinstance(cmd_args, list) or
        not all(isinstance(x, unicode) for x in cmd_args)):
      parser.error("Invalid args in %s" % gen_json_path)
    # Convert command line (embedded in JSON) to Options object.
    work_units.append((parse_archive_command_line(cmd_args, cwd), cwd))

  # Perform the archival, all at once.
  isolated_hashes = isolate_and_archive(
      work_units, options.isolate_server, options.namespace)

  # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
  # no way currently to figure out what *.isolated file from a batch were
  # successfully uploaded, so consider them all failed (and emit empty dict
  # as JSON result).
  if options.dump_json:
    tools.write_json(options.dump_json, isolated_hashes or {}, False)

  if isolated_hashes is None:
    return EXIT_CODE_UPLOAD_ERROR

  # isolated_hashes[x] is None if 'x.isolate' contains a error.
  if not all(isolated_hashes.itervalues()):
    return EXIT_CODE_ISOLATE_ERROR

  return 0
def main():
  """Applies a Rietveld issue's patch to a local checkout.

  Parses command-line options, fetches the issue's patchset (trying
  anonymous access first, then authenticated), applies it with the
  detected SCM, and optionally runs 'gclient sync' when DEPS changed.
  Returns a process exit code (0 on success, non-zero on failure).
  """
  # TODO(pgervais): This function is way too long. Split.
  sys.stdout = Unbuffered(sys.stdout)
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Prints debugging infos')
  parser.add_option(
      '-e', '--email',
      help='Email address to access rietveld. If not specified, anonymous '
           'access will be used.')
  parser.add_option(
      '-E', '--email-file',
      help='File containing the email address to access rietveld. '
           'If not specified, anonymous access will be used.')
  parser.add_option(
      '-k', '--private-key-file',
      help='Path to file containing a private key in p12 format for OAuth2 '
           'authentication with "notasecret" password (as generated by Google '
           'Cloud Console).')
  parser.add_option(
      '-i', '--issue', type='int', help='Rietveld issue number')
  parser.add_option(
      '-p', '--patchset', type='int', help='Rietveld issue\'s patchset number')
  parser.add_option(
      '-r', '--root_dir', default=os.getcwd(),
      help='Root directory to apply the patch')
  parser.add_option(
      '-s', '--server', default='http://codereview.chromium.org',
      help='Rietveld server')
  parser.add_option('--no-auth', action='store_true',
                    help='Do not attempt authenticated requests.')
  parser.add_option('--revision-mapping', default='{}',
                    help='When running gclient, annotate the got_revisions '
                         'using the revision-mapping.')
  parser.add_option('-f', '--force', action='store_true',
                    help='Really run apply_issue, even if .update.flag '
                         'is detected.')
  parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.')
  parser.add_option('--whitelist', action='append', default=[],
                    help='Patch only specified file(s).')
  parser.add_option('--blacklist', action='append', default=[],
                    help='Don\'t patch specified file(s).')
  parser.add_option('-d', '--ignore_deps', action='store_true',
                    help='Don\'t run gclient sync on DEPS changes.')
  auth.add_auth_options(parser)
  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)

  # Option sanity checks: mutually exclusive filters / email sources.
  if options.whitelist and options.blacklist:
    parser.error('Cannot specify both --whitelist and --blacklist')

  if options.email and options.email_file:
    parser.error('-e and -E options are incompatible')

  # bot_update leaves update.flag behind; in that case the checkout is
  # already patched, so do nothing unless --force is given.
  if (os.path.isfile(os.path.join(os.getcwd(), 'update.flag'))
      and not options.force):
    print 'update.flag file found: bot_update has run and checkout is already '
    print 'in a consistent state. No actions will be performed in this step.'
    return 0
  # -v raises verbosity: WARNING -> INFO -> DEBUG (capped at 2).
  logging.basicConfig(
      format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s',
      level=[logging.WARNING, logging.INFO, logging.DEBUG][
          min(2, options.verbose)])
  if args:
    parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))
  if not options.issue:
    parser.error('Require --issue')
  options.server = options.server.rstrip('/')
  if not options.server:
    parser.error('Require a valid server')

  options.revision_mapping = json.loads(options.revision_mapping)

  # read email if needed
  if options.email_file:
    if not os.path.exists(options.email_file):
      parser.error('file does not exist: %s' % options.email_file)
    with open(options.email_file, 'rb') as f:
      options.email = f.read().strip()

  print('Connecting to %s' % options.server)
  # Always try un-authenticated first, except for OAuth2
  if options.private_key_file:
    # OAuth2 authentication
    obj = rietveld.JwtOAuth2Rietveld(options.server,
                                     options.email,
                                     options.private_key_file)
    properties = obj.get_issue_properties(options.issue, False)
  else:
    # Passing None as auth_config disables authentication.
    obj = rietveld.Rietveld(options.server, None)
    properties = None
    # Bad except clauses order (HTTPError is an ancestor class of
    # ClientLoginError)
    # pylint: disable=E0701
    try:
      properties = obj.get_issue_properties(options.issue, False)
    except urllib2.HTTPError as e:
      # A 302 redirect means a login is required; anything else is fatal.
      if e.getcode() != 302:
        raise
      if options.no_auth:
        exit('FAIL: Login detected -- is issue private?')
      # TODO(maruel): A few 'Invalid username or password.' are printed first,
      # we should get rid of those.
    except rietveld.upload.ClientLoginError as e:
      # Fine, we'll do proper authentication.
      pass
    if properties is None:
      # Anonymous access failed; retry with real credentials.
      obj = rietveld.Rietveld(options.server, auth_config, options.email)
      try:
        properties = obj.get_issue_properties(options.issue, False)
      except rietveld.upload.ClientLoginError as e:
        print('Accessing the issue requires proper credentials.')
        return 1

  if not options.patchset:
    # Default to the most recent patchset of the issue.
    options.patchset = properties['patchsets'][-1]
    print('No patchset specified. Using patchset %d' % options.patchset)

  issues_patchsets_to_apply = [(options.issue, options.patchset)]
  # Temporary comment out the below while we try to figure out what to do with
  # patchset dependencies on trybots.
  # TODO(rmistry): Uncomment the below after a solution is found.
  # depends_on_info = obj.get_depends_on_patchset(options.issue,
  #                                               options.patchset)
  # while depends_on_info:
  #   depends_on_issue = int(depends_on_info['issue'])
  #   depends_on_patchset = int(depends_on_info['patchset'])
  #   try:
  #     depends_on_info = obj.get_depends_on_patchset(depends_on_issue,
  #                                                   depends_on_patchset)
  #     issues_patchsets_to_apply.insert(0, (depends_on_issue,
  #                                          depends_on_patchset))
  #   except urllib2.HTTPError:
  #     print ('The patchset that was marked as a dependency no longer '
  #            'exists: %s/%d/#ps%d' % (
  #                options.server, depends_on_issue, depends_on_patchset))
  #     print 'Therefore it is likely that this patch will not apply cleanly.'
  #     print
  #     depends_on_info = None

  num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
  if num_issues_patchsets_to_apply > 1:
    print
    print 'apply_issue.py found %d dependent CLs.' % (
        num_issues_patchsets_to_apply - 1)
    print 'They will be applied in the following order:'
    num = 1
    for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
      print '  #%d %s/%d/#ps%d' % (
          num, options.server, issue_to_apply, patchset_to_apply)
      num += 1
    print

  for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
    issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
                                 patchset_to_apply)
    print('Downloading patch from %s' % issue_url)
    try:
      patchset = obj.get_patch(issue_to_apply, patchset_to_apply)
    except urllib2.HTTPError as e:
      # Py2 print statement: the % formatting applies to the whole
      # parenthesized string before printing.
      print(
          'Failed to fetch the patch for issue %d, patchset %d.\n'
          'Try visiting %s/%d') % (
              issue_to_apply, patchset_to_apply,
              options.server, issue_to_apply)
      return 1
    # Optionally restrict the patch to a whitelist/blacklist of files.
    if options.whitelist:
      patchset.patches = [patch for patch in patchset.patches
                          if patch.filename in options.whitelist]
    if options.blacklist:
      patchset.patches = [patch for patch in patchset.patches
                          if patch.filename not in options.blacklist]
    for patch in patchset.patches:
      print(patch)
    full_dir = os.path.abspath(options.root_dir)
    scm_type = scm.determine_scm(full_dir)
    if scm_type == 'svn':
      scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
    elif scm_type == 'git':
      scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
    elif scm_type == None:
      scm_obj = checkout.RawCheckout(full_dir, None, None)
    else:
      parser.error('Couldn\'t determine the scm')

    # TODO(maruel): HACK, remove me.
    # When run a build slave, make sure buildbot knows that the checkout was
    # modified.
    if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
      # See sourcedirIsPatched() in:
      # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
      # chromium_commands.py?view=markup
      open('.buildbot-patched', 'w').close()

    print('\nApplying the patch from %s' % issue_url)
    try:
      scm_obj.apply_patch(patchset, verbose=True)
    except checkout.PatchApplicationFailed as e:
      print(str(e))
      print('CWD=%s' % os.getcwd())
      print('Checkout path=%s' % scm_obj.project_path)
      return 1

  # 'patchset'/'full_dir'/'scm_type' here refer to the last iteration of the
  # loop above.
  if ('DEPS' in map(os.path.basename, patchset.filenames)
      and not options.ignore_deps):
    gclient_root = gclient_utils.FindGclientRoot(full_dir)
    if gclient_root and scm_type:
      print(
          'A DEPS file was updated inside a gclient checkout, running gclient '
          'sync.')
      gclient_path = os.path.join(BASE_DIR, 'gclient')
      if sys.platform == 'win32':
        gclient_path += '.bat'
      with annotated_gclient.temp_filename(suffix='gclient') as f:
        cmd = [
            gclient_path, 'sync',
            '--nohooks',
            '--delete_unversioned_trees',
            ]
        if scm_type == 'svn':
          cmd.extend(['--revision', 'BASE'])
        if options.revision_mapping:
          cmd.extend(['--output-json', f])

        retcode = subprocess.call(cmd, cwd=gclient_root)

        if retcode == 0 and options.revision_mapping:
          revisions = annotated_gclient.parse_got_revision(
              f, options.revision_mapping)
          annotated_gclient.emit_buildprops(revisions)

        return retcode
  return 0
def main():
  """Prints a developer's recent Rietveld/issue-tracker activity.

  Parses filter/format options, resolves the date range, then fetches and
  prints changes, reviews and issues via MyActivity. Returns 0.
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0

  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>',
      default=os.environ.get('USER'),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date (mm/dd/yy)')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date (mm/dd/yy)')
  # Two months back is guaranteed to fall inside the previous quarter.
  quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                              relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
          quarter_begin.strftime('%Y-%m-%d'),
          quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date (mm/dd/yy)')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth',
      action='store_true',
      help='Ask to authenticate for instances with no auth cookie')

  # If none of these are given, all activity types are searched (see below).
  activity_types_group = optparse.OptionGroup(
      parser, 'Activity Types',
      'By default, all activity will be looked up and '
      'printed. If any of these are specified, only '
      'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes', action='store_true', help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues', action='store_true', help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews', action='store_true', help='Show reviews.')
  parser.add_option_group(activity_types_group)

  output_format_group = optparse.OptionGroup(
      parser, 'Output Format',
      'By default, all activity will be printed in the '
      'following format: {url} {title}. This can be '
      'changed for either all activity types or '
      'individually for each activity type. The format '
      'is defined as documented for '
      'string.format(...). The variables available for '
      'all activity types are url, title and author. '
      'Format options for specific activity types will '
      'override the generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>',
      default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing changes. Supports the '
           'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>',
      default=u'{heading}:',
      help='Specifies the format to use when printing headings.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  parser.add_option_group(output_format_group)
  auth.add_auth_options(parser)

  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=E1101

  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER is not set, please use -u')

  options.user = username(options.user)

  # Resolve the date range from whichever range option was given; default
  # is the week ending yesterday.
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(
          datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (get_week_of(datetime.today() -
                                timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = datetime.strptime(options.begin, '%m/%d/%y')
    if options.end:
      end = datetime.strptime(options.end, '%m/%d/%y')
    else:
      end = datetime.today()
  options.begin, options.end = begin, end

  if options.markdown:
    options.output_format = ' * [{title}]({url})'
    options.output_format_heading = '### {heading} ###'

  print 'Searching for activity by %s' % options.user
  print 'Using range %s to %s' % (options.begin, options.end)

  my_activity = MyActivity(options)

  # No explicit activity type means search all of them.
  if not (options.changes or options.reviews or options.issues):
    options.changes = True
    options.issues = True
    options.reviews = True

  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()

  print 'Looking up activity.....'

  try:
    if options.changes:
      my_activity.get_changes()
    if options.reviews:
      my_activity.get_reviews()
    if options.issues:
      my_activity.get_issues()
  except auth.AuthenticationError as e:
    # Best effort: report the auth failure but still print what was fetched.
    print "auth.AuthenticationError: %s" % e

  print '\n\n\n'

  my_activity.print_changes()
  my_activity.print_reviews()
  my_activity.print_issues()
  return 0
def main():
  """Prints (or counts) Rietveld reviews done by a reviewer in a date range.

  Defaults the reviewer to $EMAIL_ADDRESS or $USER@chromium.org, resolves
  the date range (--last_quarter or --begin/--end), then delegates to
  print_count() or print_reviews(). Returns 0.
  """
  # Silence upload.py.
  rietveld.upload.verbosity = 0
  today = datetime.date.today()
  begin, end = get_previous_quarter(today)
  # Derive a default reviewer email from the environment if possible.
  default_email = os.environ.get('EMAIL_ADDRESS')
  if not default_email:
    user = os.environ.get('USER')
    if user:
      default_email = user + '@chromium.org'

  parser = optparse.OptionParser(description=__doc__)
  parser.add_option(
      '--count', action='store_true',
      help='Just count instead of printing individual issues')
  parser.add_option(
      '-r', '--reviewer', metavar='<email>', default=default_email,
      help='Filter on issue reviewer, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date')
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, e.g. %s to %s' % (begin, end))
  parser.add_option(
      '-i', '--instance_url', metavar='<host>',
      default='http://codereview.chromium.org',
      help='Host to use, default is %default')
  auth.add_auth_options(parser)
  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=no-member
  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)
  if args:
    parser.error('Args unsupported')
  if options.reviewer is None:
    parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')

  # Status messages go to stderr so stdout stays machine-readable.
  print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer
  if options.last_quarter:
    options.begin = begin
    options.end = end
    print >> sys.stderr, 'Using range %s to %s' % (
        options.begin, options.end)
  else:
    if options.begin is None or options.end is None:
      parser.error('Please specify either --last_quarter or --begin and --end')

  # Validate dates.
  try:
    options.begin = dateutil.parser.parse(options.begin).strftime('%Y-%m-%d')
    options.end = dateutil.parser.parse(options.end).strftime('%Y-%m-%d')
  except ValueError as e:
    parser.error('%s: %s - %s' % (e, options.begin, options.end))

  if options.count:
    print_count(options.reviewer, options.begin, options.end,
                options.instance_url, auth_config)
  else:
    print_reviews(options.reviewer, options.begin, options.end,
                  options.instance_url, auth_config)
  return 0
def main(args):
  """Entry point: maps an .isolated tree into a local cache and runs it.

  Exactly one of --isolated or --hash must be given. Returns the exit code
  of the executed test, or 1 on any internal failure.
  """
  tools.disable_buffering()
  parser = tools.OptionParserWithLogging(
      usage='%prog <options>',
      version=__version__,
      log_file=RUN_ISOLATED_LOG_FILE)

  data_group = optparse.OptionGroup(parser, 'Data source')
  data_group.add_option(
      '-s', '--isolated',
      metavar='FILE',
      help='File/url describing what to map or run')
  data_group.add_option(
      '-H', '--hash',
      help='Hash of the .isolated to grab from the hash table')
  isolateserver.add_isolate_server_options(data_group, True)
  parser.add_option_group(data_group)

  cache_group = optparse.OptionGroup(parser, 'Cache management')
  cache_group.add_option(
      '--cache',
      default='cache',
      metavar='DIR',
      help='Cache directory, default=%default')
  cache_group.add_option(
      '--max-cache-size',
      type='int',
      metavar='NNN',
      default=20*1024*1024*1024,  # 20 GiB
      help='Trim if the cache gets larger than this value, default=%default')
  cache_group.add_option(
      '--min-free-space',
      type='int',
      metavar='NNN',
      default=2*1024*1024*1024,  # 2 GiB
      help='Trim if disk free space becomes lower than this value, '
           'default=%default')
  cache_group.add_option(
      '--max-items',
      type='int',
      metavar='NNN',
      default=100000,
      help='Trim if more than this number of items are in the cache '
           'default=%default')
  parser.add_option_group(cache_group)

  auth.add_auth_options(parser)
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(data_group, options)

  # The two sources are mutually exclusive and one is required.
  if bool(options.isolated) == bool(options.hash):
    logging.debug('One and only one of --isolated or --hash is required.')
    parser.error('One and only one of --isolated or --hash is required.')

  options.cache = os.path.abspath(options.cache)
  policies = CachePolicies(
      options.max_cache_size, options.min_free_space, options.max_items)

  # Hash algorithm is derived from the namespace (e.g. 'default-gzip').
  algo = isolateserver.get_hash_algo(options.namespace)
  try:
    # |options.cache| may not exist until DiskCache() instance is created.
    cache = DiskCache(options.cache, policies, algo)
    remote = options.isolate_server or options.indir
    with isolateserver.get_storage(remote, options.namespace) as storage:
      return run_tha_test(
          options.isolated or options.hash, storage, cache, algo, args)
  except Exception as e:
    # Make sure any exception is logged.
    tools.report_error(e)
    logging.exception(e)
    return 1
def CMDbatcharchive(parser, args):
  """Archives multiple isolated trees at once.

  Using single command instead of multiple sequential invocations allows to cut
  redundant work when isolated trees share common files (e.g. file hashes are
  checked only once, their presence on the server is checked only once, and so
  on).

  Takes a list of paths to *.isolated.gen.json files that describe what trees
  to isolate. Format of files is:
  {
    "version": 1,
    "dir": <absolute path to a directory all other paths are relative to>,
    "args": [list of command line arguments for single 'archive' command]
  }
  """
  isolateserver.add_isolate_server_options(parser)
  isolateserver.add_archive_options(parser)
  auth.add_auth_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Write isolated hashes of archived trees to this file as JSON')
  options, args = parser.parse_args(args)
  auth.process_auth_options(parser, options)
  isolateserver.process_isolate_server_options(parser, options, True)

  # Validate all incoming options, prepare what needs to be archived as a list
  # of tuples (archival options, working directory).
  work_units = []
  for gen_json_path in args:
    # Validate JSON format of a *.isolated.gen.json file.
    data = tools.read_json(gen_json_path)
    if data.get('version') != ISOLATED_GEN_JSON_VERSION:
      parser.error('Invalid version in %s' % gen_json_path)
    cwd = data.get('dir')
    if not isinstance(cwd, unicode) or not fs.isdir(cwd):
      parser.error('Invalid dir in %s' % gen_json_path)
    # Fix: named 'cmd_args' instead of 'args' so the list being iterated by
    # the enclosing for-loop is not rebound mid-iteration (shadowing hazard).
    cmd_args = data.get('args')
    if (not isinstance(cmd_args, list) or
        not all(isinstance(x, unicode) for x in cmd_args)):
      parser.error('Invalid args in %s' % gen_json_path)
    # Convert command line (embedded in JSON) to Options object.
    work_units.append((parse_archive_command_line(cmd_args, cwd), cwd))

  # Perform the archival, all at once.
  isolated_hashes = isolate_and_archive(
      work_units, options.isolate_server, options.namespace)

  # TODO(vadimsh): isolate_and_archive returns None on upload failure, there's
  # no way currently to figure out what *.isolated file from a batch were
  # successfully uploaded, so consider them all failed (and emit empty dict
  # as JSON result).
  if options.dump_json:
    tools.write_json(options.dump_json, isolated_hashes or {}, False)

  if isolated_hashes is None:
    return EXIT_CODE_UPLOAD_ERROR

  # isolated_hashes[x] is None if 'x.isolate' contains a error.
  if not all(isolated_hashes.itervalues()):
    return EXIT_CODE_ISOLATE_ERROR

  return 0